Compare commits


No commits in common. "develop" and "rei/symlinks" have entirely different histories.

59 changed files with 389 additions and 3125 deletions

View File

@ -1 +0,0 @@
sallie.librepush.net,79.143.178.141 ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBHPJu+9XaDuDQA8jVdD++BSkazgVUt1c22oa+FoWiqWVWAVu33+Jh3Evc4s3HK6jMMuOIVs8AVnGAUY5eaqmJik=

View File

@ -1,24 +0,0 @@
kind: pipeline
name: default
platform:
os: linux
arch: arm64
steps:
- name: deploy manual
image: rust:1.53.0-slim
when:
branch:
- develop
environment:
DEPLOY_SSH_KEY:
from_secret: DEPLOY_SSH_KEY
commands: # warning! Doesn't seem to like hashed known_hosts...
- cargo install mdbook
- cd docs
- mdbook build
- echo "$DEPLOY_SSH_KEY" > /tmp/rsync_key
- chmod u=rw,go= /tmp/rsync_key
- rsync -e 'ssh -i /tmp/rsync_key -o "UserKnownHostsFile .ci/known_hosts"' -vaz --delete ./docs/book space_docs_librepush_net@sallie.librepush.net:./public/scone
- rm /tmp/rsync_key

4
.gitignore vendored
View File

@ -2,6 +2,4 @@
__pycache__
/scone.egg-info
/dist
/venv
/.venv
/book

View File

@ -1,16 +0,0 @@
platform: linux/arm64
pipeline:
deployManual:
image: docker.bics.ga/rei_ci/mdbook:latest-arm64
when:
branch:
- develop
secrets:
- deploy_ssh_key
commands: # warning! Doesn't seem to like hashed known_hosts...
- mdbook build
- echo "$DEPLOY_SSH_KEY" > /tmp/rsync_key
- chmod u=rw,go= /tmp/rsync_key
- rsync -e 'ssh -i /tmp/rsync_key -o "UserKnownHostsFile ./.ci/known_hosts"' -vaz --delete ./book/ space_docs_librepush_net@sallie.librepush.net:./public/scone
- rm /tmp/rsync_key

View File

@ -1,3 +0,0 @@
recursive-include scone *.tx
recursive-include scone *.j2

View File

@ -1,17 +0,0 @@
# scone: Server CONfiguration Engine
Scone is a tool for configuring servers (or even other computers).
The idea is that you write a declarative configuration using Scone Menu Language (.scoml) files and run `scone` on it.
Scone will, ideally speaking, do whatever is necessary to bring your declarative configuration into reality.
You can track your configuration in `git`, for example, and should be able to re-apply this to your server whenever you need to recreate it (or update it!).
Scone tries to be quick, because iterative development is more engaging and productive.
To achieve this, Scone runs multiple recipes (jobs) in parallel; a fairly rich dependency-tracking system lets recipes be ordered intuitively whilst still allowing parallelism. Scone also caches the outcomes of recipes, so a small change to the configuration does not force every recipe to be re-executed.
Scone is currently alpha-grade software; the author uses it to configure their servers (fully), mobile phone (mostly fully) and laptop/desktop computers (partially).
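As a rough sketch of what a menu might look like (a hypothetical example assuming a Debian-like sous; the available recipes and their arguments are described in the documentation):
```scoml
[[apt-install]] Install the web server
packages = ["nginx"]

[[systemd]] Enable and start nginx
unit = "nginx"
already_installed = true
enabled = true
started = true
```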

View File

@ -1,15 +0,0 @@
[book]
authors = ["Olivier"]
language = "en"
multilingual = false
src = "docs"
title = "Scone Documentation"
description = "Documentation for Scone (Server CONfiguration Engine)"
[output.html]
default-theme = "rust"
git-repository-url = "https://bics.ga/reivilibre/scone"
git-repository-icon = "fa-git-alt"
fold = { enable = true, level = 1 }
# TODO scoml and toml highlighting

View File

@ -1,43 +0,0 @@
#!/bin/sh -eu
# Usage: ./contrib/install_scone.sh someuser@somehost
# NOTE: you need sudo privilege.
# try: # echo 'scone ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/scone
# # chmod 0440 /etc/sudoers.d/scone
# # visudo -c
# TODO need to open up rights to various dirs so that other users can access
pyvenv='~/.scone/venv'
remote=$1
version='0.2.0'
# create a source distribution
python3 setup.py sdist
if ! ssh -q "$remote" -- dpkg-query -W -f="'"'${Status}'"'" python3-venv | grep "ok installed"; then
ssh -qt "$remote" -- sudo apt-get install -yq python3 python3-venv build-essential
else
echo "python3-venv already installed"
fi
if ! ssh -q "$remote" -- test -d "$pyvenv"; then
# create the dir
ssh -q "$remote" -- mkdir -p "$pyvenv"
# install Python
ssh -q "$remote" -- python3 -m venv "$pyvenv"
fi
# copy sdist
scp "./dist/scone-$version.tar.gz" "$remote:~/.scone/"
# install sdist
ssh -q "$remote" -- "$pyvenv/bin/pip" install "~/.scone/scone-$version.tar.gz[sous]" || :
# don't need any special treatment for now, blank toml will do
ssh -q "$remote" -- touch "~/.scone/scone.sous.toml"
# create the worktop dir
ssh -q "$remote" -- mkdir -p "~/.scone/worktop"
ssh -q "$remote" -- chmod ugo+rX "~" "~/.scone/" "~/.scone/worktop"
ssh -q "$remote" -- chmod -R ugo+rX "~/.scone/venv"
ssh -q "$remote" -- chmod ugo+w,+t "~/.scone/worktop"

View File

@ -1,43 +0,0 @@
#!/bin/sh -eu
# Usage: ./contrib/install_scone.sh someuser@somehost
# NOTE: you need sudo privilege.
# try: # echo 'scone ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/scone
# # chmod 0440 /etc/sudoers.d/scone
# # visudo -c
# TODO need to open up rights to various dirs so that other users can access
pyvenv='~/.scone/venv'
remote=$1
version='0.1.0'
# create a source distribution
python3 setup.py sdist
#if ! ssh -q "$remote" -- dpkg-query -W -f="'"'${Status}'"'" python3-venv | grep "ok installed"; then
# ssh -qt "$remote" -- sudo apt-get install python3 python3-venv
#else
# echo "python3-venv already installed"
#fi
if ! ssh -q "$remote" -- test -d "$pyvenv"; then
# create the dir
ssh -q "$remote" -- mkdir -p "$pyvenv"
# install Python
ssh -q "$remote" -- python3 -m venv "$pyvenv"
fi
# copy sdist
scp "./dist/scone-$version.tar.gz" "$remote:~/.scone/"
# install sdist
ssh -q "$remote" -- "$pyvenv/bin/pip" install "~/.scone/scone-$version.tar.gz[sous]" || :
# don't need any special treatment for now, blank toml will do
ssh -q "$remote" -- touch "~/.scone/scone.sous.toml"
# create the worktop dir
ssh -q "$remote" -- mkdir -p "~/.scone/worktop"
ssh -q "$remote" -- chmod ugo+rX "~" "~/.scone/" "~/.scone/worktop"
ssh -q "$remote" -- chmod -R ugo+rX "~/.scone/venv"
ssh -q "$remote" -- chmod ugo+w,+t "~/.scone/worktop"

View File

@ -1,9 +0,0 @@
KDE Syntax Highlighter Files for Scone Menu Language
====================================================
To install:
```
mkdir -p ~/.local/share/org.kde.syntax-highlighting/syntax/
cp scoml.xml ~/.local/share/org.kde.syntax-highlighting/syntax/
```

View File

@ -1,201 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--
~ Copyright 2020, Olivier 'reivilibre'.
~
~ This file is part of Scone.
~
~ Scone is free software: you can redistribute it and/or modify
~ it under the terms of the GNU General Public License as published by
~ the Free Software Foundation, either version 3 of the License, or
~ (at your option) any later version.
~
~ Scone is distributed in the hope that it will be useful,
~ but WITHOUT ANY WARRANTY; without even the implied warranty of
~ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
~ GNU General Public License for more details.
~
~ You should have received a copy of the GNU General Public License
~ along with Scone. If not, see <https://www.gnu.org/licenses/>.
-->
<!DOCTYPE language SYSTEM "language.dtd"
[
<!ENTITY more "(_\d+)*">
<!ENTITY int "[+-]?(0|[1-9]\d*)&more;">
<!ENTITY frac "\.\d+&more;">
<!ENTITY exp "[eE][+-]?\d+&more;">
<!ENTITY offset "[+-]\d\d:\d\d">
<!ENTITY time "\d\d:\d\d:\d\d(\.\d+)?(&offset;|Z)?">
<!ENTITY datetime "\d\d\d\d-\d\d-\d\d(T&time;)?">
]>
<!-- https://github.com/toml-lang/toml -->
<!-- This is a derivative of the TOML highlighter by flying-sheep@web.de under the LGPLv2+ -->
<language name="Scone Menu Language (ScoML)" section="Configuration" extensions="*.scoml" mimetype="text/x-scone-menu" version="1" kateversion="5.0" author="flying-sheep@web.de, Olivier 'reivilibre'" license="LGPLv2+">
<highlighting>
<list name="bools">
<item>true</item>
<item>false</item>
</list>
<list name="directives">
<item>@user</item>
<item>@sous</item>
<item>@for</item>
<item>@after</item>
</list>
<contexts>
<context attribute="Error" lineEndContext="#stay" name="Scoml">
<IncludeRules context="BlockCommon"/>
</context>
<!-- recipe headers -->
<context attribute="RecipeHeader" lineEndContext="Recipe" name="RecipeHeader">
<Detect2Chars attribute="RecipeHeader" context="RecipeHeaderHuman" char="]" char1="]" />
<DetectSpaces attribute="Whitespace"/>
<!-- <DetectChar attribute="TableHeader" context="#stay" char="."/> -->
<RegExpr attribute="RecipeName" context="#stay" String="[\w-]+"/>
<!--<DetectChar attribute="Key" context="QuotedKey" char="&quot;"/>-->
</context>
<context attribute="Human" name="RecipeHeaderHuman" lineEndContext="#pop!Recipe">
<RegExpr context="#stay" String="[^\n]+"/>
</context>
<context attribute="Error" name="Recipe" lineEndContext="#stay">
<DetectSpaces attribute="Whitespace"/>
<Detect2Chars attribute="RecipeHeader" context="#pop!RecipeHeader" char="[" char1="[" endRegion="Recipe" beginRegion="Recipe" />
<DetectChar attribute="SubBlockHeader" context="#pop!SubBlockHeader" char="{" beginRegion="SubBlock" endRegion="Recipe" />
<DetectChar context="#pop" char="}" endRegion="Recipe" lookAhead="true" />
<!--<DetectChar attribute="TableHeader" context="TableHeader" char="[" endRegion="Table"/>-->
<RegExpr attribute="Key" context="#stay" String="[\w-]+" firstNonSpace="true"/>
<DetectChar attribute="Key" context="QuotedKey" char="&quot;" firstNonSpace="true"/>
<DetectChar attribute="Assignment" context="Value" char="="/>
<DetectChar char="#" attribute="Comment" context="Comment"/>
<keyword attribute="Directive" String="directives" context="#stay"/> <!-- todo recognise the stuff after a directive too -->
</context>
<!-- sub blocks -->
<context attribute="Error" lineEndContext="#stay" name="SubBlock">
<DetectChar attribute="SubBlock" context="#pop" char="}" endRegion="SubBlock"/>
<IncludeRules context="BlockCommon"/>
</context>
<context attribute="SubBlockHeader" lineEndContext="SubBlock" name="SubBlockHeader">
<RegExpr attribute="Human" context="#stay" String="[^\n]+"/>
</context>
<context attribute="Block" lineEndContext="#stay" name="BlockCommon">
<DetectSpaces attribute="Whitespace"/>
<Detect2Chars attribute="RecipeHeader" context="RecipeHeader" char="[" char1="[" beginRegion="Recipe" />
<DetectChar attribute="SubBlockHeader" context="SubBlockHeader" char="{" beginRegion="SubBlock"/>
<!--<DetectChar attribute="TableHeader" context="TableHeader" char="[" endRegion="Table"/>-->
<RegExpr attribute="Key" context="#stay" String="[\w-]+" firstNonSpace="true"/>
<DetectChar attribute="Key" context="QuotedKey" char="&quot;" firstNonSpace="true"/>
<DetectChar attribute="Assignment" context="Value" char="="/>
<DetectChar char="#" attribute="Comment" context="Comment"/>
<keyword attribute="Directive" String="directives" context="#stay"/> <!-- todo recognise the stuff after a directive too -->
</context>
<!-- values -->
<context attribute="Normal Text" lineEndContext="#pop" fallthrough="true" fallthroughContext="#pop" name="Value">
<DetectSpaces attribute="Whitespace"/>
<RegExpr attribute="Date" context="#stay" String="&datetime;"/>
<keyword attribute="Bool" String="bools" context="#stay"/>
<RegExpr attribute="Float" context="#stay" String="&int;(&frac;&exp;|&frac;|&exp;)"/>
<RegExpr attribute="Int" context="#stay" String="&int;"/>
<StringDetect attribute="String" context="MultilineString" String="&quot;&quot;&quot;"/>
<DetectChar attribute="String" context="String" char="&quot;"/>
<StringDetect attribute="String" context="LitMultilineString" String="'''"/>
<DetectChar attribute="String" context="LitString" char="'"/>
<DetectChar attribute="Array" context="Array" char="["/>
<DetectChar attribute="InlineTable" context="InlineTable" char="{"/>
<DetectChar char="#" attribute="Comment" context="Comment"/>
</context>
<context attribute="Comment" lineEndContext="#pop" name="Comment">
<DetectSpaces/>
<IncludeRules context="##Alerts" />
<IncludeRules context="##Modelines" />
<DetectIdentifier/>
</context>
<!-- Quoted keys and Strings -->
<context attribute="Key" lineEndContext="#pop" name="QuotedKey">
<LineContinue attribute="Escape" context="#stay"/>
<RegExpr attribute="Escape" String="\\[btnfr&quot;\\]" context="#stay" />
<RegExpr attribute="Escape" String="\\(u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})" context="#stay" />
<RegExpr attribute="Error" String="\\." context="#stay" />
<DetectChar attribute="Key" context="#pop" char="&quot;"/>
</context>
<context attribute="String" lineEndContext="#pop" name="String">
<LineContinue attribute="Escape" context="#stay"/>
<RegExpr attribute="Escape" String="\\[btnfr&quot;\\]" context="#stay" />
<RegExpr attribute="Escape" String="\\(u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})" context="#stay" />
<RegExpr attribute="Error" String="\\." context="#stay" />
<DetectChar attribute="String" context="#pop" char="&quot;"/>
</context>
<context attribute="String" lineEndContext="#stay" name="MultilineString">
<LineContinue attribute="Escape" context="#stay"/>
<RegExpr attribute="Escape" String="\\[btnfr&quot;\\]" context="#stay" />
<RegExpr attribute="Escape" String="\\(u[0-9a-fA-F]{4}|U[0-9a-fA-F]{8})" context="#stay" />
<RegExpr attribute="Error" String="\\." context="#stay" />
<StringDetect attribute="String" context="#pop" String="&quot;&quot;&quot;"/>
</context>
<context attribute="LitString" lineEndContext="#pop" name="LitString">
<DetectChar attribute="String" context="#pop" char="'"/>
</context>
<context attribute="LitString" lineEndContext="#stay" name="LitMultilineString">
<StringDetect attribute="String" context="#pop" String="'''"/>
</context>
<!-- Arrays -->
<context attribute="Array" lineEndContext="#stay" name="Array">
<IncludeRules context="Value" />
<DetectChar context="#pop" attribute="Array" char="]" />
<DetectChar context="#stay" attribute="NextEntry" char="," />
</context>
<context attribute="InlineTable" lineEndContext="#stay" name="InlineTable">
<RegExpr attribute="Key" context="#stay" String="[\w-]+"/>
<DetectChar attribute="Key" context="QuotedKey" char="&quot;"/>
<DetectChar attribute="Assignment" context="Value" char="="/>
<DetectChar char="#" attribute="Comment" context="Comment"/>
<DetectChar context="#pop" attribute="InlineTable" char="}" />
<DetectChar context="#stay" attribute="NextEntry" char="," />
</context>
</contexts>
<itemDatas>
<itemData name="Normal Text" defStyleNum="dsNormal"/>
<itemData name="Key" defStyleNum="dsDataType"/>
<itemData name="RecipeHeader" defStyleNum="dsKeyword"/>
<itemData name="RecipeName" defStyleNum="dsControlFlow"/>
<itemData name="Assignment" defStyleNum="dsOperator"/>
<itemData name="Comment" defStyleNum="dsComment"/>
<itemData name="Date" defStyleNum="dsBaseN"/>
<itemData name="Float" defStyleNum="dsFloat"/>
<itemData name="Int" defStyleNum="dsDecVal"/>
<itemData name="Bool" defStyleNum="dsConstant"/>
<itemData name="String" defStyleNum="dsString"/>
<itemData name="LitString" defStyleNum="dsVerbatimString"/>
<itemData name="Escape" defStyleNum="dsSpecialChar"/>
<itemData name="Array" defStyleNum="dsOperator"/>
<itemData name="InlineTable" defStyleNum="dsOperator"/>
<itemData name="NextEntry" defStyleNum="dsOperator"/>
<itemData name="SubBlockHeader" defStyleNum="dsKeyword"/>
<itemData name="Directive" defStyleNum="dsPreprocessor"/>
<itemData name="Human" defStyleNum="dsAnnotation"/>
<itemData name="Whitespace" defStyleNum="dsNormal"/>
<itemData name="Error" defStyleNum="dsError"/>
</itemDatas>
</highlighting>
<general>
<comments>
<comment name="singleLine" start="#" position="afterwhitespace" />
</comments>
</general>
</language>
<!-- kate: replace-tabs off; -->

View File

@ -1,26 +0,0 @@
# Summary
- [Scone](./index.md)
- [Architecture](./architecture.md)
- [Getting Started](./starting/index.md)
- [Installing scone](./starting/install.md)
- [Creating your first head chef and restaurant](./starting/head.md)
- [Installing scone to a server and creating a sous chef](./starting/sous.md)
- [Cooking your first menu](./starting/first_cook.md)
- [Using the Fridge and Freezer](./starting/fridge_freezer.md)
- [Recipes](./recipes/index.md)
- [Declare](./recipes/declare.md)
- [Operating System Recipes](./recipes/os/index.md)
- [Systemd](./recipes/os/systemd.md)
- [Users](./recipes/os/users.md)
- [Filesystem Recipes](./recipes/filesystem/index.md)
- [Fridge Recipes](./recipes/filesystem/fridge.md)
- [Git Recipes](./recipes/filesystem/git.md)
- [Database Recipes](./recipes/database/index.md)
- [Postgres](./recipes/database/postgres.md)
- [Environments](./recipes/environments/index.md)
- [Docker](./recipes/environments/docker.md)
- [Python](./recipes/environments/python.md)
- [Resources](./resources/index.md)
- [Filesystem Resources](./resources/filesystem.md)
- [Utensils]()

View File

@ -1,21 +0,0 @@
# Architecture
A controlling host, called the `head` chef, is responsible for sending commands to other hosts over SSH.
The hosts being controlled are called `sous` chefs.
The head chef owns all the instructions and data needed to set up the sous chefs as desired.
Recipes are arranged in a directed acyclic graph (DAG); recipes can depend on each other so that they are run sequentially (otherwise, recipes will be run in parallel, because time is precious).
'Resources' are also present in the DAG and are allowed to have edges between themselves and recipes. The conceptual idea is that some recipes **provide** resources (such as files) and others **need** them; a sketch of this appears after the definitions below.
## Definitions
* **restaurant**: Collection of head configuration, sous configuration, menus and a fridge. It is the whole repository of configuration.
* **head**: Host that instructs other hosts
* **sous**: Host that is instructed by other hosts
* **recipe**: A job that produces something, such as a file, or a certain configuration.
* **menu**: A collection of recipes
* **fridge**: Store of 'ingredients' — data (files, templates and variables) needed to cook recipes.
* **freezer**: Like a fridge, but encrypted on the host
* **supermarket**: Source of ingredients which can't be kept persistently in the fridge for some reason.
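As a rough sketch of how this plays out (hypothetical names and paths; the `os-user` and `git` recipes are documented in the Recipes chapter), a recipe that provides a resource is cooked before the recipes that need it:
```scoml
[[os-user]] Create the gitea user
@user = root
@provides directory(/home/gitea)
name = gitea

[[git]] Check out the gitea configuration
src = "https://gitplace.example.org/ops/gitea-config.git"
dest = "/home/gitea/config"
branch = "main"
```
Here the `git` recipe needs the parent directory of `dest`, so it is ordered after the `os-user` recipe that declares it provides `/home/gitea`; unrelated recipes remain free to run in parallel.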

View File

@ -1,11 +0,0 @@
# Scone
Scone is a tool to help you set up your server(s) reliably and automatically.
You can write the configuration needed to bring your services up and use these
to configure your servers again and again.
This configuration is declarative and can be tracked in version control.
Scone is still an alpha project.

View File

@ -1,65 +0,0 @@
# Declare
Sometimes we need to tell Scone that a resource already exists on the sous, and that scone doesn't need it to be provided by the menu.
The solution to this is to have no-operation recipes that provide these resources.
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`declare-os-user`](#declare-os-user) | | `os-user` |
| [`declare-dir`](#declare-dir) | | `directory` |
| [`declare-file`](#declare-file) | | `file` |
## `declare-os-user`
**Preconditions**: the OS user *must already exist on the sous*.
**Provides**: `os-user(?)` where `?` is the argument `name`.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| name | any username string | *required* | Username of the OS (e.g. Linux) user that already exists. |
### Example
```scoml
[[declare-os-user]]
name = root
```
## `declare-dir`
**Preconditions**: the specified directory *must already exist on the sous*.
**Provides**: `directory(?)` where `?` is the argument `path`.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| path | any path string | *required* | Path of the directory that already exists. |
### Example
```scoml
[[declare-dir]]
path = /etc/systemd/system
```
## `declare-file`
**Preconditions**: the specified file *must already exist on the sous*.
**Provides**: `file(?)` where `?` is the argument `path`.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| path | any path string | *required* | Path of the file that already exists. |
### Example
```scoml
[[declare-file]]
path = /etc/passwd
```

View File

@ -1,52 +0,0 @@
# Docker
This is work-in-progress integration with Docker for Scone.
For this set of recipes to work, you will need the sous to have the `docker`
Python package installed; for this, you can use `pip install docker` with the
scone virtualenv activated. (TODO link to better documentation about installing
other Python deps in a scone venv.)
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`docker-container`](#docker-container) | | `docker-container` |
## `docker-container`
**Preconditions**: the image must be available or downloadable
**Provides**: `docker-container(?)` where `?` is the argument `name`.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| image | any Docker image that exists or can be pulled | *required* | This name is used to specify what image to install. |
| name | any ID string | *required* | This name identifies the container and is passed to docker. |
| command | any command string | *optional* | If specified, this sets the command that will be run by the container. |
| ports | dictionary of "(container port)/tcp" or "(container port)/udp" to {host = "(host address)", port = "(host port)"} | empty | This mapping describes how ports are published from inside the container to the host. |
| volumes | dictionary of "(volume name)" or "/path/to/binding/on/host" to {bind = "/path/in/container", mode = "rw" or "ro"} | empty | This mapping describes what filesystem resources are mounted into the container. |
| environment | dictionary of "(key)" to value | empty | This mapping describes what environment variables are given to the container. |
| restart_policy | "always" or "on-failure" | "on-failure" | This specifies the container's restart policy. |
### Example
```scoml
[[docker-container]]
image = "org/image:1"
name = "mycontainer"
ports = {
"80/tcp" = {
"host" = "127.0.0.1",
"port" = 4080
}
}
volumes = {
"/var/lib/mycontainer" = {
bind = "/data",
mode = "rw"
}
}
environment = {
MYCONTAINER_MODE = "production"
}
restart_policy = "always"
```

View File

@ -1,34 +0,0 @@
# Python
Python is a programming language. Scone supports creating virtual Python environments to isolate dependencies.
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`python-venv`](#python-venv) | `directory` | `directory` `file`? `directory`? |
## `python-venv`
Creates a Python virtual environment, installing the specified dependencies.
**Provides:** `directory({dir})` and `file({dir}/bin/python)`. In the future, there may be the ability to check for (and expose) other executables in the venv.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| dir | path | *required* | This is the directory to create the virtual environment in. |
| interpreter | string or path | *required* | This specifies the desired virtual interpreter. Usually this is `python3`. |
| install | list of requirements | *required* | This is a list of Python dependencies to install. You should specify `dir /path/to/dir` for directories, `git /path/to/dir` for local git repositories and `-r /path/to/requirements.txt` for requirements files. |
`dir` and `git` requirements are currently synonymous and ensure that the venv will keep up to date using the specified directory. For now, `python-venv` is unable to cache execution if directories are in use (though this is rarely a problem). Use of the `git` keyword should be preferred for git repositories as that may allow for better performance in the future.
### Example
```scoml
[[python-venv]]
dir = "/home/abc/venv"
interpreter = "python3"
install:
- "dir /home/abc/abc"
- "-r /home/abc/requirements.txt"
- "hypercorn"
```

View File

@ -1,40 +0,0 @@
# Git
Git is a version control system.
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`git`](#git) | `directory` | `directory` `file`? `directory`? |
## `git`
Checks out a Git repository to a location on disk.
You must specify exactly one of `ref` or `branch`.
**Preconditions:** The remote repository must exist and be accessible. The specified commit, tag or branch must exist.
**Postconditions:** The repository will be checked out on disk at the specified commit.
**Provides:** `directory({dest})`, `file({dest}/{each file entry in expect})`, `directory({dest}/{each dir entry in expect})`
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| src | string | *required* | This is the URL to the git 'remote'. Common formats are `https://gitplace.example.org/user/repo.git` or `git@example.org:repo.git`. |
| dest | path | *required* | This is the path to the directory to check out the repository at. |
| ref | string | *semi-optional* | Commit hash or tag name to check out. |
| branch | string | *semi-optional* | Branch name to check out. The branch must exist on the remote. |
| expect | list of paths relative to repository root | *optional* | The specified files and directories will be checked for existence after the repository is checked out. Directories are indicated by a trailing `/`. The specified files/directories are provided as resources. |
| submodules | true or false | false | Whether to update submodules or not. |
| deploy_key | fridge path | *optional* | If specified, the specified SSH private key will be used to clone the Git repository. Useful in conjunction with various Git providers' (e.g. Gitea) 'Deploy keys' feature, granting read-only access to a single repository. |
### Example
```scoml
[[git]] Clone the abc git repository
src = "gitea@bics.ga:reivilibre/abc.git"
dest = "/home/abc/abc"
ref = "61242b97d6"
expect = ["setup.py", "abc/"]
deploy_key = "Common/gitea_deploy_key.frozen"
```

View File

@ -1,88 +0,0 @@
# Systemd
Systemd is a service manager, used in some Linux distributions.
This Scone module allows declaring units, enabling them, and marking them to be started.
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`systemd`](#systemd) | `file`? | |
| [`systemd-timer`](#systemd-timer) | | |
## `systemd`
Declares and optionally enables and/or starts a systemd unit.
**Note:** probably must be run as the `root` user.
**Preconditions:** if `already_installed` is `true`, the specified unit must already be available for systemd.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| unit | any systemd unit name string | *required* | Name of the Systemd unit. If no extension is provided, `.service` will be inferred. |
| at | a path | *semi-optional* | If specified, the path to where the unit file already exists. |
| already_installed | true, false | false | If true, no path is needed, as it will be assumed that the unit is already available. |
| enabled | true, false | *optional* | If true, the unit will be enabled ('installed'). |
| started | true, false | *optional* | If true, the unit will be started. |
| ~~restart_on~~ | ~~list of paths~~ | *optional* | **NOT IMPLEMENTED** ~~a list of files to which changes will cause the unit to be restarted.~~ reloaded? |
It is an error to omit **at** unless **already_installed** is set to true.
### Example
```scoml
[[systemd]] Enable the gitea service
unit = "gitea"
at = "/etc/systemd/system/gitea.service"
enabled = true
started = true
```
## `systemd-timer`
Declares and starts a Systemd Timer (along with a simple service for it to trigger). This is a useful alternative to cronjobs.
**Note:** probably must be run as the `root` user.
**Preconditions:** The `cd` path must be provided.
**Postconditions:** `{unit}.service` and `{unit}.timer` will exist as Systemd units. `{unit}.timer` will be configured to trigger `{unit}.service` and will be started.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| user | string | *required* | Name of the user that will run the executable in the systemd service. |
| group | string | same as `user` | Name of the group that will run the executable in the systemd service. |
| unit | any systemd unit name string | *required* | Name of the Systemd units, **without** extension. |
| description | string | *required* | `[Unit] Description=` string for systemd. |
| cd | path | *required* | Working directory for the executable |
| command | string | *required* | Executable for the service to start. |
| calendar | string or list of strings | *required* | List of Systemd `OnCalendar=` times to trigger this timer at. |
| persistent | true, false | false | If true, the timer will trigger immediately upon activation if it missed the previous trigger time. |
| enabled | true, false | true | If true, the timer will be enabled and started. |
| environment | dict of string to string | *optional* | If specified, these environment variables will be provided to the command. |
### Systemd `OnCalendar` syntax
Examples:
* `Fri *-*-* 10:00:00`: every Friday at 10h00
* `*-*-1 09:00:00`: every 1st of the month at 09h00
* `Wed *-*-1..7 12:00:00`: the first Wednesday of every month at noon
* `*-11-* 06,09:00:00`: every day of November at 06h00 and 09h00
You can use `systemd-analyze calendar '*-*-* *:*:*'` (for example) to test an `OnCalendar` expression.
### Example
```scoml
[[systemd-timer]] Install timer to renew certificates every day at 04h00
user = radm
unit = "radm_renew"
description = "Renews, issues and deploys TLS certificates"
cd = "/home/radm/rei-acme-dns"
command = "/home/radm/rei-acme-dns/radm.sh --contact 'mailto:${radm.contact_email}'"
calendar = "*-*-* 04:00:00"
```

View File

@ -1,37 +0,0 @@
# Users
Users are a fundamental concept in modern operating systems.
It is useful to be able to create them, both for operators and for services.
| Recipe | Needs | Provides |
| -----: | ----- | -------- |
| [`os-user`](#os-user) | | `os-user` |
## `os-user`
Creates a user.
**Note:** probably must be run as the `root` user.
**Provides**: `os-user(?)` where `?` is the argument `name`.
| Argument | Accepted Values | Default | Description |
| -------: | --------------- | ------- | ----------- |
| name | any username string | *required* | Username of the OS (e.g. Linux) user to create. |
| make_group | true, false | true | True if the user should have its own group with the same name. |
| make_home | true, false | true | True if the user's home directory should be created. |
| home | any path string | *optional* | Directory for the user's home. If not specified, the operating system will choose it. |
| password | any string | *optional* | Password for the user. If not specified, no password will be created. |
### Example
Idiomatic example, showing how to run the recipe as the root user and declare that it provides the user's home directory.
```scoml
[[os-user]]
@provides directory(/home/hedgedoc)
@user = root
name = hedgedoc
```

View File

@ -1,56 +0,0 @@
# Installing scone to a server and creating a sous chef
These instructions are provided for Debian-like servers.
## Create a user for your sous
Log in as root on the server you want to control with scone.
Create a user for scone.
```
# adduser --disabled-password scone
```
<!--- on PinePhone (alpine), I needed to do `passwd -d scone` so that ssh would allow connection, since it starts locked but passwd -d deletes the password -->
Make your scone user have passwordless sudo capability.
```
# echo 'scone ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/scone
# chmod 0440 /etc/sudoers.d/scone
# visudo -c # check sudoers file
```
## Install scone on your sous
You'll need the SSH public key for your controlling computer here;
try looking in `/home/$USER/.ssh/id_ed25519.pub` or `/home/$USER/.ssh/id_rsa.pub`.
Alternatively, I suggest generating a new SSH key in your password manager, such as
KeePassXC, and adding it to your SSH agent as-and-when you need to use Scone.
```
# su scone
$ cd
$ mkdir .ssh
$ echo 'ssh-ed25519 AA...bJ user@headmachine' >> .ssh/authorized_keys # use your own SSH public key here
$ chmod -R go-rw .ssh
```
Now, on your head machine, activate your Scone virtual environment and run the following
from within the `scone` repository directory:
```
./contrib/install_scone.sh scone@machine.example.org
```
## Add to your head configuration
Finally, add a section to your `scone.head.toml` file to describe the new installation
you have made.
```toml
[sous.machine]
host = "machine.example.org"
user = "scone"
souscmd = "/home/scone/.scone/venv/bin/python -m scone.sous ~/.scone"
```
TODO: `scone.sous.toml`, `[groups]`, ...

View File

@ -30,6 +30,3 @@ ignore_missing_imports = True
 [mypy-docker.*]
 ignore_missing_imports = True
-
-[mypy-mysql]
-ignore_missing_imports = True

74
scone/__main__.py Normal file
View File

@ -0,0 +1,74 @@
# Copyright 2020, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
# import asyncio
# import itertools
# import sys
# from typing import List
#
# from scone.head.head import Head
# from scone.head.recipe import Recipe
# from scone.head.kitchen import Kitchen
# from scone.head.recipe import Preparation
# def main(args=None):
# if args is None:
# args = sys.argv[1:]
#
# if len(args) < 1:
# raise RuntimeError("Needs to be passed a sous config directory as 1st arg!")
#
# print("Am I a head?")
#
# head = Head.open(args[0])
#
# print(head.debug_info())
#
# recipes_by_sous = head.construct_recipes()
#
# all_recipes: List[Recipe] = list(
# itertools.chain.from_iterable(recipes_by_sous.values())
# )
#
# prepare = Preparation(all_recipes)
# order = prepare.prepare(head)
#
# for epoch, items in enumerate(order):
# print(f"----- Course {epoch} -----")
#
# for item in items:
# if isinstance(item, Recipe):
# print(f" > recipe {item}")
# elif isinstance(item, tuple):
# kind, ident, extra = item
# print(f" - we now have {kind} {ident} {dict(extra)}")
#
# print("Starting run")
#
# k = Kitchen(head)
#
# async def cook():
# for epoch, epoch_items in enumerate(order):
# print(f"Cooking Course {epoch} of {len(order)}")
# await k.run_epoch(epoch_items)
#
# asyncio.get_event_loop().run_until_complete(cook())
#
#
# if __name__ == "__main__":
# main()

View File

@ -19,7 +19,7 @@ import asyncio
 import logging
 import struct
 import sys
-from asyncio import IncompleteReadError, Queue, Task
+from asyncio import Queue, Task
 from asyncio.streams import FlowControlMixin, StreamReader, StreamWriter
 from typing import Any, Dict, Optional
@ -63,15 +63,9 @@ class ChanPro:
         self._out.write(encoded)
         await self._out.drain()
 
-    async def _recv_dict(self) -> Optional[dict]:
+    async def _recv_dict(self) -> dict:
         size = struct.calcsize(SIZE_FORMAT)
-        try:
-            encoded_len = await self._in.readexactly(size)
-        except IncompleteReadError as ire:
-            if len(ire.partial) == 0:
-                # this is just EOF
-                return None
+        encoded_len = await self._in.readexactly(size)
         (length,) = struct.unpack(SIZE_FORMAT, encoded_len)
         encoded = await self._in.readexactly(length)
         return cbor2.loads(encoded)
@ -97,23 +91,10 @@
             idx = 0
             while True:
                 message = await self._recv_dict()
-                if message is None:
-                    logger.debug("EOF message")
-                    break
                 # logger.debug("<message> %d %r", idx, message)
                 idx += 1
                 await self.handle_incoming_message(message, default_route=default_route)
-
-            for chan_id, channel in self._channels.items():
-                if not channel._closed:
-                    logger.debug("closing chan %s due to EOF", chan_id)
-                    channel._closed = True
-                    await channel._queue.put(None)
-                # else:
-                #     logger.debug(
-                #         "NOT closing chan %s due to EOF: already closed", chan_id
-                #     )
 
         self._listener = asyncio.create_task(
             channel_listener()  # py 3.8 , name="chanpro channel listener"
         )
@ -177,19 +158,7 @@ class Channel:
     async def wait_close(self):
         try:
-            closure = await self.recv()
-            if not isinstance(closure, dict):
-                raise ValueError("Closure should be of type dict")
-            if not closure.get("closure"):
-                raise ValueError("Closure should have closure: True")
-            reason = closure.get("reason")
-            if reason != "Utensil complete":
-                raise ValueError(
-                    f"Closure was not for the right reason: expected 'Utensil complete'"
-                    f", found {reason}"
-                )
+            await self.recv()
             raise RuntimeError("Message arrived when expecting closure.")
         except EOFError:
             # expected
@ -211,9 +180,6 @@ class ChanProHead:
         self._channel0 = channel0
         self._next_channel_id = 1
 
-    async def close(self):
-        await self._chanpro.close()
-
     async def start_command_channel(self, command: str, payload: Any) -> Channel:
         new_channel = self._chanpro.new_channel(self._next_channel_id, command)
         self._next_channel_id += 1

View File

@ -16,10 +16,8 @@
 # along with Scone. If not, see <https://www.gnu.org/licenses/>.
 import os
-import re
 import sys
 from hashlib import sha256
-from typing import Dict
 
 
 def eprint(*args, **kwargs):
@ -59,25 +57,3 @@ def sha256_file(path: str) -> str:
 
 def sha256_bytes(data: bytes) -> str:
     return sha256(data).hexdigest()
-
-
-def multireplace(string: str, replacements: Dict[str, str]) -> str:
-    """
-    Given a string and a replacement map, it returns the replaced string.
-    :param string: string to execute replacements on
-    :param replacements: replacement dictionary {value to find: value to replace}
-    source: https://stackoverflow.com/a/36620263
-    """
-    # Place longer ones first to keep shorter substrings from matching
-    # where the longer ones should take place
-    # For instance given the replacements {'ab': 'AB', 'abc': 'ABC'} against
-    # the string 'hey abc', it should produce 'hey ABC' and not 'hey ABc'
-    substrs = sorted(replacements, key=len, reverse=True)
-
-    # Create a big OR regex that matches any of the substrings to replace
-    regexp = re.compile("|".join(map(re.escape, substrs)))
-
-    # For each match, look up the new string in the replacements
-    return regexp.sub(lambda match: replacements[match.group(0)], string)

View File

@ -1,89 +0,0 @@
# Copyright 2020, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
import asyncio
import logging
import random
from asyncio import Lock
from collections import defaultdict
from typing import Dict, List, Tuple
from scone.default.utensils.basic_utensils import SimpleExec
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import Recipe, RecipeContext
from scone.head.utils import check_type
logger = logging.getLogger(__name__)
# (id of Kitchen, sous name) → Lock
apk_locks: Dict[Tuple[int, str], Lock] = defaultdict(Lock)
class ApkPackage(Recipe):
_NAME = "apk-install"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.packages: List[str] = check_type(args["packages"], list)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
for package in self.packages:
preparation.provides("apk-package", package)
async def _apk_command(
self, kitchen: Kitchen, args: List[str]
) -> SimpleExec.Result:
retries = 3
while retries > 0:
result = await kitchen.ut1areq(SimpleExec(args, "/"), SimpleExec.Result)
logger.debug("E %r: %r", args, result.stderr)
if result.exit_code == 0:
return result
sleep = 2.0 + 3.0 * random.random()
await asyncio.sleep(sleep)
return result # noqa
async def cook(self, kitchen: Kitchen) -> None:
# this is a one-off task assuming everything works
kitchen.get_dependency_tracker()
lock = apk_locks[(id(kitchen), self.recipe_context.sous)]
if not self.packages:
return
# we only let one apk operation run at once on each sous
async with lock:
# apk update not needed because it's automatic once the cache timer expires!
install_args = ["apk", "add", "-q"]
install_args += list(self.packages)
install = await self._apk_command(kitchen, install_args)
if install.exit_code != 0:
raise RuntimeError(
f"apk add failed with err {install.exit_code}:"
f" {install.stderr!r}"
)

View File

@ -17,10 +17,7 @@
 import asyncio
 import logging
-import random
-from asyncio import Lock
-from collections import defaultdict
-from typing import Dict, List, Tuple
+from typing import List
 
 from scone.default.utensils.basic_utensils import SimpleExec
 from scone.head.head import Head
@ -117,10 +114,6 @@ logger = logging.getLogger(__name__)
 # )
 
-# (id of Kitchen, sous name) → Lock
-apt_locks: Dict[Tuple[int, str], Lock] = defaultdict(Lock)
-
 
 class AptPackage(Recipe):
     _NAME = "apt-install"
@ -140,12 +133,7 @@ class AptPackage(Recipe):
         retries = 3
         while retries > 0:
-            result = await kitchen.ut1areq(
-                SimpleExec(args, "/", {"DEBIAN_FRONTEND": "noninteractive"}),
-                SimpleExec.Result,
-            )
-            logger.debug("E %r: %r", args, result.stderr)
+            result = await kitchen.ut1areq(SimpleExec(args, "/"), SimpleExec.Result)
 
             if result.exit_code == 0 or b"/lock" not in result.stderr:
                 return result
@ -154,6 +142,8 @@
                 "Failed apt command due to suspected locking issue. Will retry…"
             )
+            retries -= 1
+
             # /lock seen in stderr, probably a locking issue...
             lock_check = await kitchen.ut1areq(
                 SimpleExec(
@ -170,8 +160,7 @@
             )
 
             retries -= 1
-            sleep = 2.0 + 3.0 * random.random()
-            await asyncio.sleep(sleep)
+            await asyncio.sleep(2.0)
 
         return result  # noqa
@ -179,13 +168,7 @@
         # this is a one-off task assuming everything works
         kitchen.get_dependency_tracker()
 
-        lock = apt_locks[(id(kitchen), self.recipe_context.sous)]
-
-        # we only want one apt task to run at once on each sous because they tend
-        # to race against each other and lock each other
-        async with lock:
-            if not self.packages:
-                return
+        if self.packages:
             update = await self._apt_command(kitchen, ["apt-get", "-yq", "update"])
             if update.exit_code != 0:
                 raise RuntimeError(

View File

@ -1,208 +0,0 @@
# Copyright 2022, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
import logging
from asyncio import Lock
from typing import Any, Dict, List, Optional, Tuple, Union
import attr
from HurricaneDNS import HurricaneDNS
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import HeadRecipe, RecipeContext
from scone.head.utils import check_type
logger = logging.getLogger(__name__)
@attr.s(auto_attribs=True)
class DnsRecord:
subdomain: str
kind: str
value: str
ttl: Optional[str]
priority: Optional[str]
def parse_records(given_raw: Union[Any, Dict[str, Dict[str, str]]]) -> List[DnsRecord]:
given = check_type(given_raw, dict, "records")
the_list: List[DnsRecord] = []
for key, attributes in given.items():
# keys: "xyz A"
# values: dicts, with keys:
# - v: 1.2.3.4
# - ttl: 86400
# - priority: 50 (for MXes)
pieces = key.split(" ")
if len(pieces) > 2:
raise ValueError(
f"Key {key} should be space-separable with 2 or less pieces."
)
if len(pieces) == 2:
subdomain, kind = pieces
else:
assert len(pieces) == 1
(kind,) = pieces
subdomain = ""
ttl_raw = attributes.get("ttl")
prio_raw = attributes.get("priority")
record_value = attributes.get("v", attributes.get("value", None))
if record_value is None:
raise ValueError("No record value")
the_list.append(
DnsRecord(
subdomain=subdomain,
kind=kind,
value=record_value,
ttl=None if ttl_raw is None else str(ttl_raw),
priority=None if prio_raw is None else str(prio_raw),
)
)
return the_list
@attr.s(auto_attribs=True)
class HeRecord:
id: str
status: Optional[str]
host: str
type: str
ttl: str
# MX Priority
mx: str
value: str
extended: str
@attr.s(auto_attribs=True)
class HeDomain:
domain: str
id: str
type: str
records: Optional[List[HeRecord]]
@attr.s(auto_attribs=True)
class HurricaneElectricCache:
client: HurricaneDNS
lock: Lock
domains: Dict[str, HeDomain]
# Tuple from (id(head), user, password) → HE DNS cache)
# If Scone is ever long-living, this could leak, but it doesn't so it won't matter
HE_CLIENT_CACHE: Dict[Tuple[int, str, str], HurricaneElectricCache] = {}
CLIENT_CACHE_LOCK: Lock = Lock()
class HurricaneElectricDns(HeadRecipe):
_NAME = "dns-hurricane"
def __init__(self, recipe_context: RecipeContext, args: dict, head: Head):
super().__init__(recipe_context, args, head)
self.username = check_type(args.get("username"), str)
self.password = check_type(args.get("password"), str)
self.domain = check_type(args.get("domain"), str)
self.records = parse_records(args.get("records"))
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
async def _get_client(self, head: Head) -> HurricaneElectricCache:
async with CLIENT_CACHE_LOCK:
cache_key = id(head), self.username, self.password
if cache_key in HE_CLIENT_CACHE:
# Happy days
return HE_CLIENT_CACHE[cache_key]
# TODO(performance): this takes about 3 sec; move it to an executor thread
client = HurricaneDNS(self.username, self.password)
domains = {}
for domain in client.list_domains():
dom = HeDomain(**domain)
domains[dom.domain] = dom
entry = HurricaneElectricCache(client, Lock(), domains)
HE_CLIENT_CACHE[cache_key] = entry
return entry
async def _get_records(
self, cached: HurricaneElectricCache, domain: HeDomain
) -> List[HeRecord]:
async with cached.lock:
if domain.records is not None:
return domain.records
domain.records = [
HeRecord(**row) for row in cached.client.list_records(domain.domain)
]
return domain.records
async def cook(self, kitchen: Kitchen) -> None:
# TODO(correctness): can't handle multiple DNS records
# with same (type, subdomain)
kitchen.get_dependency_tracker().ignore()
cached = await self._get_client(kitchen.head)
records = await self._get_records(cached, cached.domains[self.domain])
records_cache: Dict[Tuple[str, str], HeRecord] = {}
for record in records:
dotted_subdomain_suffix = f".{self.domain}"
if record.host == self.domain:
subdomain = ""
elif record.host.endswith(dotted_subdomain_suffix):
subdomain = record.host[: -len(dotted_subdomain_suffix)]
else:
raise ValueError(f"Can't figure out subdomain for {record.host}")
records_cache[(subdomain, record.type)] = record
logger.debug("Present records: %r", records_cache.keys())
for wanted_record in self.records:
wr_key = (wanted_record.subdomain, wanted_record.kind)
logger.debug("Want %r: %r", wr_key, wanted_record)
existing_record = records_cache.get(wr_key)
if existing_record is not None:
# TODO(correctness): amend as needed
logger.debug("Found existing %r", existing_record)
else:
logger.debug("Will need to create new one")
async with cached.lock:
cached.client.add_record(
self.domain,
wanted_record.subdomain,
wanted_record.kind,
wanted_record.value,
wanted_record.priority or None,
wanted_record.ttl or 86400,
)

View File

@ -1,13 +1,10 @@
 from scone.default.utensils.docker_utensils import (
-    ContainerState,
     DockerContainerRun,
-    DockerContainerState,
     DockerImagePull,
     DockerNetworkCreate,
     DockerVolumeCreate,
 )
-from scone.head.head import Head
-from scone.head.kitchen import Kitchen, Preparation
+from scone.head.kitchen import Kitchen
 from scone.head.recipe import Recipe, RecipeContext
 from scone.head.utils import check_type, check_type_opt
@ -19,40 +16,12 @@ class DockerContainer(Recipe):
         super().__init__(recipe_context, args, head)
 
         self.image = check_type(args.get("image"), str)
-        self.command = check_type_opt(args.get("command"), str)
-        self.name = check_type(args.get("name"), str)
-        self.volumes = check_type(args.get("volumes", dict()), dict)
-        self.ports = check_type(args.get("ports", dict()), dict)
-        self.environment = check_type(args.get("environment", dict()), dict)
-        self.restart_policy = check_type(args.get("restart_policy", "on-failure"), str)
-        self.cap_add = check_type(args.get("cap_add", list()), list)
-        self.cap_drop = check_type(args.get("cap_drop", list()), list)
-
-    def prepare(self, preparation: Preparation, head: Head) -> None:
-        super().prepare(preparation, head)
-        preparation.provides("docker-container", self.name)
+        self.command = check_type(args.get("command"), str)
 
     async def cook(self, kitchen: Kitchen) -> None:
        kitchen.get_dependency_tracker()
 
-        current_state = ContainerState(
-            await kitchen.ut1(DockerContainerState(self.name))
-        )
-
-        if current_state == ContainerState.NOTFOUND:
-            await kitchen.ut1areq(
-                DockerContainerRun(
-                    self.image,
-                    self.command,
-                    self.name,
-                    {k: (v["host"], v["port"]) for k, v in self.ports.items()},
-                    self.volumes,
-                    {k: str(v) for k, v in self.environment.items()},
-                    self.restart_policy,
-                    self.cap_add,
-                    self.cap_drop,
-                ),
-                DockerContainerRun.Result,
-            )
+        await kitchen.ut1areq(
+            DockerContainerRun(self.image, self.command), DockerContainerRun.Result
+        )

View File

@ -16,13 +16,11 @@
 # along with Scone. If not, see <https://www.gnu.org/licenses/>.
 from pathlib import Path
-from typing import List, Optional, Tuple
+from typing import List
 
 from scone.common.modeutils import DEFAULT_MODE_DIR, parse_mode
-from scone.default.steps import filesystem_steps, fridge_steps
 from scone.default.steps.basic_steps import exec_no_fails
 from scone.default.steps.filesystem_steps import depend_remote_file
-from scone.default.steps.fridge_steps import FridgeMetadata, load_and_transform
 from scone.default.utensils.basic_utensils import (
     Chmod,
     Chown,
@ -307,26 +305,6 @@ class GitCheckout(Recipe):
         self.expect: List[str] = check_type(args.get("expect", []), list)
         self.submodules = check_type(args.get("submodules", False), bool)
 
-        deploy_key = check_type_opt(args.get("deploy_key"), str)
-        if deploy_key is not None:
-            deploy_key_search = fridge_steps.search_in_fridge(head, deploy_key)
-            if deploy_key_search is None:
-                raise ValueError(
-                    f"Cannot find deploy key: {deploy_key!r} in the fridge."
-                )
-            desugared_src, dk_fullpath = deploy_key_search
-            _unextended_path_str, dk_meta = fridge_steps.decode_fridge_extension(
-                str(dk_fullpath)
-            )
-            self.deploy_key: Optional[Tuple[Path, FridgeMetadata]] = (
-                dk_fullpath,
-                dk_meta,
-            )
-        else:
-            self.deploy_key = None
-
     def prepare(self, preparation: Preparation, head: Head) -> None:
         super().prepare(preparation, head)
         parent = str(Path(self.dest_dir).parent)
@ -344,7 +322,7 @@ class GitCheckout(Recipe):
         # no non-arg dependencies
         k.get_dependency_tracker()
 
-        stat = await k.ut1a(Stat(self.dest_dir + "/.git"), Stat.Result)
+        stat = await k.ut1a(Stat(self.dest_dir), Stat.Result)
         if stat is None:
             # doesn't exist; git init it
             await exec_no_fails(k, ["git", "init", self.dest_dir], "/")
@ -367,37 +345,6 @@ class GitCheckout(Recipe):
             )
 
         # fetch the latest from the remote
-        if self.deploy_key:
-            deploy_key_full_path, deploy_key_fridge_metadata = self.deploy_key
-            remote_deploy_key = self.dest_dir.rstrip("/") + "~deploykey"
-            data = await load_and_transform(
-                k,
-                deploy_key_fridge_metadata,
-                deploy_key_full_path,
-                self.recipe_context.variables,
-            )
-            await filesystem_steps.write_sous_file(
-                k,
-                remote_deploy_key,
-                # strict user-only access
-                parse_mode("u=rw,go=", directory=False),
-                data,
-            )
-            await exec_no_fails(
-                k,
-                [
-                    "git",
-                    "-c",
-                    f"core.sshCommand=ssh -i {remote_deploy_key}",
-                    "fetch",
-                    "scone",
-                ],
-                self.dest_dir,
-            )
-            await filesystem_steps.delete_sous_file(k, remote_deploy_key)
-        else:
-            await exec_no_fails(k, ["git", "fetch", "scone"], self.dest_dir)
+        await exec_no_fails(k, ["git", "fetch", "scone"], self.dest_dir)
 
         # figure out what ref we want to use

View File

@ -20,32 +20,24 @@ import logging
import os
from asyncio import Future
from pathlib import Path
from typing import Dict, List, Set, Tuple, cast
from typing import Dict, cast
from urllib.parse import urlparse
import requests
from scone.common.misc import sha256_file
from scone.common.modeutils import DEFAULT_MODE_DIR, DEFAULT_MODE_FILE, parse_mode
from scone.common.modeutils import DEFAULT_MODE_FILE, parse_mode
from scone.default.steps import fridge_steps
from scone.default.steps.fridge_steps import (
SUPERMARKET_RELATIVE,
FridgeMetadata,
load_and_transform,
)
from scone.default.utensils.basic_utensils import (
Chmod,
Chown,
HashFile,
MakeDirectory,
Stat,
WriteBlockInFile,
WriteFile,
)
from scone.default.utensils.basic_utensils import Chmod, Chown, HashFile, WriteFile
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import Recipe, RecipeContext
from scone.head.utils import check_type, check_type_opt
from scone.head.utils import check_type
logger = logging.getLogger(__name__)
@ -86,9 +78,6 @@ class FridgeCopy(Recipe):
self.fridge_meta: FridgeMetadata = meta
self.mode = parse_mode(mode, directory=False)
self.targ_user = check_type_opt(args.get("owner"), str)
self.targ_group = check_type_opt(args.get("group"), str)
self._desugared_src = desugared_src
def prepare(self, preparation: Preparation, head: Head) -> None:
@ -96,15 +85,9 @@ class FridgeCopy(Recipe):
preparation.provides("file", str(self.destination))
preparation.needs("directory", str(self.destination.parent))
if self.targ_user:
preparation.needs("os-user", self.targ_user)
if self.targ_group:
preparation.needs("os-group", self.targ_group)
async def cook(self, k: Kitchen) -> None:
data = await load_and_transform(
k, self.fridge_meta, self.real_path, self.recipe_context.variables
k, self.fridge_meta, self.real_path, self.recipe_context.sous
)
dest_str = str(self.destination)
chan = await k.start(WriteFile(dest_str, self.mode))
@ -113,9 +96,6 @@ class FridgeCopy(Recipe):
if await chan.recv() != "OK":
raise RuntimeError(f"WriteFile failed on fridge-copy to {self.destination}")
if self.targ_user or self.targ_group:
await k.ut0(Chown(dest_str, self.targ_user, self.targ_group))
# this is the wrong thing
# hash_of_data = sha256_bytes(data)
# k.get_dependency_tracker().register_remote_file(dest_str, hash_of_data)
@ -123,117 +103,6 @@ class FridgeCopy(Recipe):
k.get_dependency_tracker().register_fridge_file(self._desugared_src)
class FridgeCopyDir(Recipe):
"""
Declares that a directory(!) should be copied from the head to the sous,
and optionally, remote files deleted.
"""
_NAME = "fridge-copy-dir"
def __init__(self, recipe_context: RecipeContext, args: dict, head: Head):
super().__init__(recipe_context, args, head)
files = fridge_steps.search_children_in_fridge(head, args["src"])
if not files:
raise ValueError(
f"Cannot find children of directory {args['src']}"
f" in the fridge (empty directories not allowed)."
)
self.files: List[Tuple[str, str, Path]] = files
dest = check_type(args["dest"], str)
self.dest_dir = Path(dest)
self.destinations: List[Path] = []
self.mkdirs: Set[str] = set()
for relative, relative_unprefixed, full_path in self.files:
unextended_path_str, _ = fridge_steps.decode_fridge_extension(
relative_unprefixed
)
self.destinations.append(Path(args["dest"], unextended_path_str))
pieces = relative_unprefixed.split("/")
for end_index in range(0, len(pieces)):
self.mkdirs.add("/".join(pieces[0:end_index]))
mode = args.get("mode", DEFAULT_MODE_FILE)
dir_mode = args.get("mode_dir", args.get("mode", DEFAULT_MODE_DIR))
assert isinstance(mode, str) or isinstance(mode, int)
assert isinstance(dir_mode, str) or isinstance(dir_mode, int)
self.file_mode = parse_mode(mode, directory=False)
self.dir_mode = parse_mode(dir_mode, directory=True)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
preparation.needs("directory", str(self.dest_dir.parent))
for mkdir in self.mkdirs:
preparation.provides("directory", str(Path(self.dest_dir, mkdir)))
for (_relative, relative_unprefixed, full_path), destination in zip(
self.files, self.destinations
):
unextended_path_str, _ = fridge_steps.decode_fridge_extension(
relative_unprefixed
)
preparation.provides("file", str(destination))
async def cook(self, k: Kitchen) -> None:
# create all needed directories
for mkdir in self.mkdirs:
directory = str(Path(self.dest_dir, mkdir))
# print("mkdir ", directory)
stat = await k.ut1a(Stat(directory), Stat.Result)
if stat is None:
# doesn't exist, make it
await k.ut0(MakeDirectory(directory, self.dir_mode))
stat = await k.ut1a(Stat(directory), Stat.Result)
if stat is None:
raise RuntimeError("Directory vanished after creation!")
if stat.dir:
# if (stat.user, stat.group) != (self.targ_user, self.targ_group):
# # need to chown
# await k.ut0(Chown(directory, self.targ_user, self.targ_group))
if stat.mode != self.dir_mode:
await k.ut0(Chmod(directory, self.dir_mode))
else:
raise RuntimeError("Already exists but not a dir: " + directory)
# copy all files from the fridge
for (relative, relative_unprefixed, full_local_path), destination in zip(
self.files, self.destinations
):
unextended_path_str, meta = fridge_steps.decode_fridge_extension(
relative_unprefixed
)
full_remote_path = str(Path(self.dest_dir, unextended_path_str))
# print("fcp ", relative, " → ", full_remote_path)
data = await load_and_transform(
k, meta, full_local_path, self.recipe_context.variables
)
dest_str = str(full_remote_path)
chan = await k.start(WriteFile(dest_str, self.file_mode))
await chan.send(data)
await chan.send(None)
if await chan.recv() != "OK":
raise RuntimeError(
f"WriteFail failed on fridge-copy to {full_remote_path}"
)
k.get_dependency_tracker().register_fridge_file(relative)
class Supermarket(Recipe):
"""
Downloads an asset (cached if necessary) and copies to sous.
@ -359,79 +228,3 @@ class Supermarket(Recipe):
with open(dest_path + ".txt", "w") as fout:
# leave a note so we can find out what this is if we need to.
fout.write(note)
class FridgeBlockInFile(Recipe):
"""
Declares that a file should be copied from the head to the sous.
"""
_NAME = "fridge-block-in-file"
def __init__(self, recipe_context: RecipeContext, args: dict, head: Head):
super().__init__(recipe_context, args, head)
search = fridge_steps.search_in_fridge(head, args["src"])
if search is None:
raise ValueError(f"Cannot find {args['src']} in the fridge.")
desugared_src, fp = search
unextended_path_str, meta = fridge_steps.decode_fridge_extension(str(fp))
unextended_path = Path(unextended_path_str)
dest = args["dest"]
if not isinstance(dest, str):
raise ValueError("No destination provided or wrong type.")
if dest.endswith("/"):
self.destination: Path = Path(args["dest"], unextended_path.parts[-1])
else:
self.destination = Path(args["dest"])
mode = args.get("mode", DEFAULT_MODE_FILE)
assert isinstance(mode, str) or isinstance(mode, int)
self.fridge_path: str = check_type(args["src"], str)
self.real_path: Path = fp
self.fridge_meta: FridgeMetadata = meta
self.mode = parse_mode(mode, directory=False)
self.targ_user = check_type_opt(args.get("owner"), str)
self.targ_group = check_type_opt(args.get("group"), str)
self._desugared_src = desugared_src
self.marker_line_prefix = check_type(args.get("marker_line_prefix", "# "), str)
self.marker_name = check_type(args.get("marker_name"), str)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
preparation.provides("file", str(self.destination))
preparation.needs("directory", str(self.destination.parent))
if self.targ_user:
preparation.needs("os-user", self.targ_user)
if self.targ_group:
preparation.needs("os-group", self.targ_group)
async def cook(self, k: Kitchen) -> None:
data = await load_and_transform(
k, self.fridge_meta, self.real_path, self.recipe_context.variables
)
dest_str = str(self.destination)
await k.ut0(
WriteBlockInFile(
dest_str,
self.mode,
self.marker_line_prefix,
self.marker_name,
data.decode(),
)
)
if self.targ_user or self.targ_group:
await k.ut0(Chown(dest_str, self.targ_user, self.targ_group))
k.get_dependency_tracker().register_fridge_file(self._desugared_src)


@ -1,52 +0,0 @@
# Copyright 2020, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
from typing import List
from scone.default.utensils.basic_utensils import SimpleExec
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import Recipe, RecipeContext
from scone.head.utils import check_type, check_type_opt
class ImperativeShellCommands(Recipe):
_NAME = "shell-commands"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.working_dir = check_type_opt(args.get("cd"), str)
self.commands = check_type(args.get("commands"), List[str])
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
if self.working_dir:
preparation.needs("directory", self.working_dir)
async def cook(self, kitchen: Kitchen) -> None:
for command in self.commands:
result = await kitchen.ut1areq(
SimpleExec(["sh", "-c", command], self.working_dir or "/tmp"),
SimpleExec.Result,
)
if result.exit_code != 0:
esc_stderr = result.stderr.decode().replace("\n", "\n ")
raise RuntimeError(
f"Exit code of {command!r} was {result.exit_code}. stderr:\n"
f" {esc_stderr}\n" + ("-" * 40)
)


@ -20,8 +20,7 @@ import logging
from typing import Optional
from scone.default.steps import linux_steps
from scone.default.utensils.basic_utensils import SimpleExec
from scone.default.utensils.linux_utensils import GetGroupEntry, GetPasswdEntry
from scone.default.utensils.linux_utensils import GetPasswdEntry
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import Recipe, RecipeContext
@ -48,9 +47,6 @@ class LinuxUser(Recipe):
if self.make_group:
preparation.provides("os-group", self.user_name)
if self.home:
preparation.provides("directory", self.home)
async def cook(self, kitchen: Kitchen) -> None:
# TODO(documentation): note this does not update users
# acknowledge tracking
@ -83,43 +79,6 @@ class LinuxUser(Recipe):
)
class LinuxGroup(Recipe):
_NAME = "os-group"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.group_name = check_type(args.get("name"), str)
def prepare(self, preparation: Preparation, head: "Head") -> None:
super().prepare(preparation, head)
preparation.provides("os-group", self.group_name)
async def cook(self, kitchen: Kitchen) -> None:
# acknowledge tracking
kitchen.get_dependency_tracker()
grp_entry = await kitchen.ut1a(
GetGroupEntry(self.group_name), GetGroupEntry.Result
)
if grp_entry:
logger.warning(
"Not updating existing os-group '%s' as it exists already.",
self.group_name,
)
else:
result = await kitchen.ut1areq(
SimpleExec(["groupadd", self.group_name], "/"), SimpleExec.Result
)
if result.exit_code != 0:
raise RuntimeError(
"Failed to create group. Error was: "
+ result.stderr.strip().decode()
)
class DeclareLinuxUser(Recipe):
_NAME = "declare-os-user"


@ -1,153 +0,0 @@
# Copyright 2020, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
from typing import List
from scone.default.utensils.db_utensils import MysqlTransaction
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
from scone.head.recipe import Recipe, RecipeContext
from scone.head.utils import check_type, check_type_opt
def mysql_dodgy_escape_literal(unescaped: str) -> str:
python_esc = repr(unescaped)
if python_esc[0] == '"':
return "'" + python_esc[1:-1].replace("'", "\\'") + "'"
else:
assert python_esc[0] == "'"
return python_esc
def mysql_dodgy_escape_username(unescaped: str) -> str:
parts = unescaped.split("@")
if len(parts) != 2:
raise ValueError(f"{unescaped!r} is not a valid sconified mysql user name.")
return (
mysql_dodgy_escape_literal(parts[0])
+ "@"
+ mysql_dodgy_escape_literal(parts[1])
)
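# A rough illustration of what these helpers produce, assuming standard Python
# repr() behaviour (the example values are hypothetical, not taken from the diff):
#   mysql_dodgy_escape_literal("utf8mb4")             -> 'utf8mb4'
#   mysql_dodgy_escape_literal("it's")                -> 'it\'s'
#   mysql_dodgy_escape_username("synapse@localhost")  -> 'synapse'@'localhost'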
class MysqlDatabase(Recipe):
_NAME = "mysql-db"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.database_name = check_type(args.get("name"), str)
self.charset = args.get("charset", "utf8mb4")
self.collate = args.get("collate", "utf8mb4_unicode_ci")
self.grant_all_to = check_type_opt(args.get("grant_all_to"), List[str])
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
preparation.provides("mysql-database", self.database_name)
if self.grant_all_to:
for user in self.grant_all_to:
preparation.needs("mysql-user", user)
async def cook(self, kitchen: Kitchen) -> None:
ch = await kitchen.start(MysqlTransaction("mysql", "root", unix_socket=True))
await ch.send(
(
"SHOW DATABASES LIKE %s",
self.database_name,
)
)
dbs = await ch.recv()
if len(dbs) > 0:
await ch.send(None)
await ch.wait_close()
return
q = f"""
CREATE DATABASE {self.database_name}
CHARACTER SET = {mysql_dodgy_escape_literal(self.charset)}
COLLATE = {mysql_dodgy_escape_literal(self.collate)}
"""
await ch.send((q,))
res = await ch.recv()
if len(res) != 0:
raise RuntimeError("expected empty result set.")
if self.grant_all_to:
for user in self.grant_all_to:
q = f"""
GRANT ALL PRIVILEGES ON {self.database_name}.*
TO {mysql_dodgy_escape_username(user)}
"""
await ch.send((q,))
res = await ch.recv()
if len(res) != 0:
raise RuntimeError("expected empty result set.")
q = """
FLUSH PRIVILEGES
"""
await ch.send((q,))
res = await ch.recv()
if len(res) != 0:
raise RuntimeError("expected empty result set.")
await ch.send(None)
await ch.wait_close()
class MysqlUser(Recipe):
_NAME = "mysql-user"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.user_name = check_type(args.get("name"), str)
self.password = check_type(args.get("password"), str)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
preparation.provides("mysql-user", self.user_name)
async def cook(self, kitchen: Kitchen) -> None:
ch = await kitchen.start(MysqlTransaction("mysql", "root", unix_socket=True))
await ch.send(
(
"SELECT 1 AS count FROM mysql.user "
"WHERE CONCAT(user, '@', host) = %s",
self.user_name,
)
)
dbs = await ch.recv()
if len(dbs) > 0 and dbs[0]["count"] == 1:
await ch.send(None)
await ch.wait_close()
return
# this is close enough to MySQL escaping I believe.
escaped_password = mysql_dodgy_escape_literal(str(self.password))
q = f"""
CREATE USER {mysql_dodgy_escape_username(self.user_name)}
IDENTIFIED BY {escaped_password}
"""
await ch.send((q,))
res = await ch.recv()
if len(res) != 0:
raise RuntimeError("expected empty result set.")
await ch.send(None)
await ch.wait_close()


@ -92,11 +92,7 @@ class PostgresUser(Recipe):
super().__init__(recipe_context, args, head)
self.user_name = check_type(args.get("name"), str)
self.passwordless = check_type(args.get("passwordless", False), bool)
if not self.passwordless:
self.password = check_type(args.get("password"), str)
elif "password" in args:
raise ValueError("'password' specified when 'passwordless' also specified.")
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
@ -116,12 +112,6 @@ class PostgresUser(Recipe):
await ch.wait_close()
return
if self.passwordless:
q = f"""
CREATE ROLE {self.user_name}
LOGIN
"""
else:
# this is close enough to Postgres escaping I believe.
escaped_password = postgres_dodgy_escape_literal(str(self.password))
@ -137,34 +127,3 @@ class PostgresUser(Recipe):
raise RuntimeError("expected empty result set.")
await ch.send(None)
await ch.wait_close()
class PostgresCommands(Recipe):
_NAME = "pg-commands"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.database = check_type(args.get("database"), str)
self.commands = check_type(args.get("commands"), list)
self.skip_database_need = check_type(
args.get("skip_database_need", False), bool
)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
if not self.skip_database_need:
preparation.needs("postgres-database", self.database)
async def cook(self, kitchen: Kitchen) -> None:
ch = await kitchen.start(PostgresTransaction(self.database))
for command in self.commands:
await ch.send((command,))
res = await ch.recv()
if len(res) != 0:
raise RuntimeError("expected empty result set.")
await ch.send(None)
await ch.wait_close()


@ -14,7 +14,7 @@
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
import re
from pathlib import Path
from typing import List, Tuple
@ -51,8 +51,6 @@ class PythonVenv(Recipe):
self.no_apt_install = check_type(args.get("_no_apt_install", False), bool)
self.upgrade = check_type(args.get("upgrade", False), bool)
# TODO(sdists)
def prepare(self, preparation: Preparation, head: Head):
@ -60,11 +58,10 @@ class PythonVenv(Recipe):
preparation.needs("directory", str(Path(self.dir).parent))
for name, flags in self.install:
featureless_name = re.sub(r"(\[[^]]+\])+$", "", name)
if "-r" in flags: if "-r" in flags:
preparation.needs("file", name) preparation.needs("file", name)
elif "git" in flags or "dir" in flags: elif "git" in flags or "dir" in flags:
preparation.needs("directory", featureless_name) preparation.needs("directory", name)
final_script = str(Path(self.dir, "bin/python")) final_script = str(Path(self.dir, "bin/python"))
@ -83,10 +80,6 @@ class PythonVenv(Recipe):
await exec_no_fails(kitchen, [self.interpreter, "-m", "venv", self.dir], "/")
install_args = []
if self.upgrade:
install_args.append("--upgrade")
for name, flags in self.install:
if "-r" in flags:
install_args.append("-r")


@ -14,19 +14,11 @@
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
from pathlib import Path
from typing import List, Optional
from scone.common.modeutils import DEFAULT_MODE_FILE
from scone.default.steps.filesystem_steps import (
template_jinja2_builtin,
write_sous_file,
)
from scone.default.steps.systemd_steps import (
cook_systemd_daemon_reload,
cook_systemd_enable,
cook_systemd_start,
cook_systemd_stop,
)
from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation
@ -50,26 +42,14 @@ class SystemdUnit(Recipe):
unit = check_type(args.get("unit"), str)
self.unit_name = unit if "." in unit else unit + ".service"
already_installed = check_type(args.get("already_installed", False), bool)
self.at = check_type_opt(args.get("at", None), str)
self.at = check_type(args.get("at"), str)
if not (already_installed or self.at):
# already_installed is for when the unit already exists on the system
# and is not created by scone.
raise ValueError(
"Must supply either already_installed = true or "
f"at = /path/to/{self.unit_name}"
)
self.enabled = check_type_opt(args.get("enabled"), bool)
self.restart_on = check_type_opt(args.get("restart_on"), list)
self.started = check_type_opt(args.get("started"), bool)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
if self.at:
# TODO(potential future): preparation.provides("systemd-unit",
# self.unit_name)
# TODO(potential future): preparation.provides("systemd-unit", self.unit_name)
preparation.needs("file", self.at)
async def cook(self, kitchen: Kitchen) -> None:
@ -82,120 +62,3 @@ class SystemdUnit(Recipe):
if self.started is not None:
if self.started:
await cook_systemd_start(kitchen, self.unit_name)
else:
await cook_systemd_stop(kitchen, self.unit_name)
class SystemdTimer(Recipe):
"""
Shorthand for creating a Systemd Service and associated Timer.
Two vague kinds of timer:
- monotonic timers
e.g. OnBootSec, OnUnitActiveSec
They stop if the computer is temporarily suspended or shut down.
Not supported at present.
- calendar timers
specified by OnCalendar, in the format:
`DayOfWeek Year-Month-Day Hour:Minute:Second`
where you can use * for ALL, comma (,) for multiple and
double-dot (..) for ranges.
"""
_NAME = "systemd-timer"
def __init__(self, recipe_context: RecipeContext, args: dict, head):
super().__init__(recipe_context, args, head)
self.unit_name = check_type(args.get("unit"), str)
assert "." not in self.unit_name and "/" not in self.unit_name
self.enabled = check_type(args.get("enabled", True), bool)
self.working_directory = check_type(args.get("cd"), str)
self.command = check_type(args.get("command"), str)
self.env = check_type_opt(args.get("environment"), dict)
self.user = check_type(args.get("user", recipe_context.user), str)
self.group = check_type(args.get("group", self.user), str)
self.description = check_type_opt(args.get("description"), str)
self.calendar = args.get("calendar")
if not (
self.calendar is None
or isinstance(self.calendar, str)
or isinstance(self.calendar, list)
):
raise TypeError(
"`calendar` should either be omitted, a string or a list of strings."
)
self.persistent = check_type_opt(args.get("persistent"), bool)
if self.calendar is None and self.persistent is not None:
raise TypeError(
"`persistent` may not be specified if `calendar` is not specified."
)
if self.calendar is None:
raise TypeError(
"Currently, only calendar/realtime timers are supported. "
"You must specify `calendar`..."
)
def prepare(self, preparation: Preparation, head: Head) -> None:
super().prepare(preparation, head)
preparation.needs("os-user", self.user)
preparation.needs("os-group", self.group)
preparation.needs("directory", self.working_directory)
async def cook(self, kitchen: Kitchen) -> None:
timer_template_path = Path(__file__).parent.parent.joinpath(
"templates", "systemd-timer.timer.j2"
)
service_template_path = Path(__file__).parent.parent.joinpath(
"templates", "systemd-timer.service.j2"
)
if isinstance(self.calendar, str):
calendars: Optional[List[str]] = [self.calendar]
elif isinstance(self.calendar, list):
calendars = self.calendar
else:
calendars = None
timer_unit = template_jinja2_builtin(
timer_template_path,
calendars=calendars,
persistent=self.persistent,
description=self.description
or f"{self.unit_name} (scone-generated timer timer)",
)
service_unit = template_jinja2_builtin(
service_template_path,
cd=self.working_directory,
command=self.command,
environment=self.env or {},
user=self.user,
group=self.group,
description=self.description
or f"{self.unit_name} (scone-generated timer service)",
)
await write_sous_file(
kitchen,
f"/etc/systemd/system/{self.unit_name}.timer",
DEFAULT_MODE_FILE,
timer_unit,
)
await write_sous_file(
kitchen,
f"/etc/systemd/system/{self.unit_name}.service",
DEFAULT_MODE_FILE,
service_unit,
)
if self.enabled is not None:
await cook_systemd_daemon_reload(kitchen)
await cook_systemd_enable(kitchen, self.enabled, self.unit_name + ".timer")
await cook_systemd_start(kitchen, self.unit_name + ".timer")


@ -14,48 +14,11 @@
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
from pathlib import Path
from typing import Any, Union
from jinja2 import DictLoader, Environment
from scone.default.utensils.basic_utensils import HashFile
from scone.default.steps.basic_steps import exec_no_fails
from scone.default.utensils.basic_utensils import WriteFile
from scone.head.kitchen import Kitchen
async def depend_remote_file(path: str, kitchen: Kitchen) -> None:
# TODO not supported yet :(
# sha256 = await kitchen.ut1(HashFile(path))
sha256 = await kitchen.ut1(HashFile(path))
kitchen.get_dependency_tracker().register_remote_file(path, sha256)
# kitchen.get_dependency_tracker().register_remote_file(path, sha256)
kitchen.get_dependency_tracker().register_remote_file(path)
def template_jinja2_builtin(path: Path, **template_vars: Any) -> str:
template_text = path.read_text("UTF-8")
try:
env = Environment(
loader=DictLoader({str(path): template_text}), autoescape=False
)
template = env.get_template(str(path))
return template.render(template_vars)
except Exception as e:
raise RuntimeError(f"Error templating built-in: {path}") from e
async def write_sous_file(
kitchen: Kitchen, path: str, mode: int, data: Union[str, bytes]
) -> None:
if isinstance(data, str):
data = data.encode("UTF-8")
chan = await kitchen.start(WriteFile(path, mode))
await chan.send(data)
await chan.send(None)
if await chan.recv() != "OK":
raise RuntimeError(f"WriteFile failed to {path}")
async def delete_sous_file(kitchen: Kitchen, path: str):
await exec_no_fails(kitchen, ["rm", path], "/")


@ -14,7 +14,7 @@
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
import os
from enum import Enum
from pathlib import Path, PurePath
from typing import List, Optional, Tuple, Union
@ -23,7 +23,6 @@ from jinja2 import DictLoader, Environment
from scone.head.head import Head
from scone.head.kitchen import Kitchen
from scone.head.variables import Variables
SUPERMARKET_RELATIVE = ".scone-cache/supermarket"
@ -82,7 +81,7 @@ def decode_fridge_extension(path: str) -> Tuple[str, FridgeMetadata]:
async def load_and_transform(
kitchen: Kitchen, meta: FridgeMetadata, fullpath: Path, variables: Variables
kitchen: Kitchen, meta: FridgeMetadata, fullpath: Path, sous: str
) -> bytes:
head = kitchen.head
# TODO(perf) don't do this in async loop
@ -100,7 +99,9 @@ async def load_and_transform(
loader=DictLoader({str(fullpath): data.decode()}), autoescape=False
)
template = env.get_template(str(fullpath))
proxies = kitchen.get_dependency_tracker().get_j2_var_proxies(variables)
proxies = kitchen.get_dependency_tracker().get_j2_var_proxies(
head.variables[sous]
)
data = template.render(proxies).encode()
except Exception as e:
raise RuntimeError(f"Error templating: {fullpath}") from e
@ -113,57 +114,3 @@ async def load_and_transform(
# template.environment.handle_exception()
return data
def _find_files_in_dir(relative: str, dir: Path) -> List[Tuple[str, str, Path]]:
"""
:param relative:
:param dir:
:return: Tuple of (
relative path with prefix included,
relative path with prefix not included,
path to local file
)
"""
result = []
num_prefix_parts = len(dir.parts)
for root, dirs, files in os.walk(dir):
for file in files:
full_path = Path(root, file)
parts = full_path.parts
if parts[0:num_prefix_parts] != dir.parts:
raise RuntimeError(f"{parts[0:num_prefix_parts]!r} != {dir.parts!r}")
dir_relative_path = "/".join(parts[num_prefix_parts:])
result.append(
(relative + "/" + dir_relative_path, dir_relative_path, full_path)
)
return result
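# A rough illustration with hypothetical paths: for a fridge directory that
# contains app/nginx.conf.j2, _find_files_in_dir("app", Path("/head/fridge/app"))
# would yield an entry like
#   ("app/nginx.conf.j2", "nginx.conf.j2", Path("/head/fridge/app/nginx.conf.j2"))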
def search_children_in_fridge(
head: Head, relative: Union[str, PurePath]
) -> Optional[List[Tuple[str, str, Path]]]:
"""
Similar to `search_in_fridge` but finds (recursively) ALL children of a named
directory. This 'directory' can be split across multiple fridge search paths.
"""
fridge_dirs = get_fridge_dirs(head)
results = []
# only the first file found for a path counts — this allows overrides
found_filenames = set()
for directory in fridge_dirs:
potential_path = directory.joinpath(relative)
if potential_path.exists():
# find children
for rel, rel_unprefixed, file in _find_files_in_dir(
str(relative), potential_path
):
unextended_name, _transformer = decode_fridge_extension(rel)
if unextended_name in found_filenames:
continue
results.append((rel, rel_unprefixed, file))
found_filenames.add(unextended_name)
return results
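# Sketch of the override behaviour, using a hypothetical fridge layout: if both
# fridge-a/ and fridge-b/ (searched in that order) contain app/nginx.conf.j2,
# only the copy from fridge-a/ is returned, because the first match for each
# unextended name wins.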


@ -24,10 +24,7 @@ async def cook_systemd_enable(kitchen: Kitchen, enabled: bool, unit_name: str):
# systemctl show -p FragmentPath apache2.service # systemctl show -p FragmentPath apache2.service
result = await kitchen.ut1areq( result = await kitchen.ut1areq(
SimpleExec( SimpleExec(["systemctl", "enable" if enabled else "disable", unit_name], "/",),
["systemctl", "enable" if enabled else "disable", unit_name],
"/",
),
SimpleExec.Result, SimpleExec.Result,
) )
@ -39,11 +36,7 @@ async def cook_systemd_enable(kitchen: Kitchen, enabled: bool, unit_name: str):
async def cook_systemd_daemon_reload(kitchen): async def cook_systemd_daemon_reload(kitchen):
result = await kitchen.ut1areq( result = await kitchen.ut1areq(
SimpleExec( SimpleExec(["systemctl", "daemon-reload"], "/",), SimpleExec.Result,
["systemctl", "daemon-reload"],
"/",
),
SimpleExec.Result,
) )
if result.exit_code != 0: if result.exit_code != 0:
@ -52,25 +45,8 @@ async def cook_systemd_daemon_reload(kitchen):
async def cook_systemd_start(kitchen: Kitchen, unit_name: str): async def cook_systemd_start(kitchen: Kitchen, unit_name: str):
result = await kitchen.ut1areq( result = await kitchen.ut1areq(
SimpleExec( SimpleExec(["systemctl", "start", unit_name], "/",), SimpleExec.Result,
["systemctl", "start", unit_name],
"/",
),
SimpleExec.Result,
) )
if result.exit_code != 0: if result.exit_code != 0:
raise RuntimeError(f"Failed to start {unit_name}: {result.stderr.decode()}") raise RuntimeError(f"Failed to start {unit_name}: {result.stderr.decode()}")
async def cook_systemd_stop(kitchen: Kitchen, unit_name: str):
result = await kitchen.ut1areq(
SimpleExec(
["systemctl", "stop", unit_name],
"/",
),
SimpleExec.Result,
)
if result.exit_code != 0:
raise RuntimeError(f"Failed to stop {unit_name}: {result.stderr.decode()}")


@ -1,17 +0,0 @@
# This unit has been generated and installed by Scone. DO NOT EDIT.
[Unit]
Description={{ description }}
[Service]
# oneshot will consider the unit started when the process ends
#Type=oneshot
Type=simple
Restart=no
WorkingDirectory={{ cd }}
ExecStart={{ command }}
{% for env_k, env_v in environment.items() %}
Environment={{ env_k }}={{ env_v }}
{% endfor %}
User={{ user }}
Group={{ group }}


@ -1,19 +0,0 @@
# This unit has been generated and installed by Scone. DO NOT EDIT.
[Unit]
Description={{ description }}
[Timer]
{% for calendar in calendars %}
OnCalendar={{ calendar }}
{% endfor %}
# examples:
# OnCalendar=weekly
# OnCalendar=*-*-* 04:00:00
# OnCalendar=Sat,Mon *-6,12-1..7 04:00:00
{% if persistent is not none %}
Persistent={{ persistent }}
{% endif %}
[Install]
WantedBy=timers.target


@ -22,7 +22,7 @@ import os
import pwd
import shutil
import stat
from typing import Dict, List, Optional
from typing import List
import attr
@ -35,13 +35,10 @@ from scone.sous.utensils import Utensil, Worktop
class WriteFile(Utensil):
path: str
mode: int
atomic: bool = attr.ib(default=True)
async def execute(self, channel: Channel, worktop):
oldumask = os.umask(0)
temp_path = self.path + "._scone-part"
write_path = temp_path if self.atomic else self.path
fdnum = os.open(write_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)
fdnum = os.open(self.path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)
os.umask(oldumask)
with open(fdnum, "wb") as file:
@ -52,47 +49,9 @@ class WriteFile(Utensil):
assert isinstance(next_chunk, bytes)
file.write(next_chunk)
if self.atomic:
shutil.move(temp_path, self.path)
await channel.send("OK") await channel.send("OK")
@attr.s(auto_attribs=True)
class WriteBlockInFile(Utensil):
path: str
mode: int
marker_line_prefix: str
marker_name: str
data: str
async def execute(self, channel: Channel, worktop):
start_marker = self.marker_line_prefix + "BEGIN " + self.marker_name + "\n"
end_marker = self.marker_line_prefix + "END " + self.marker_name + "\n"
if os.path.exists(self.path):
with open(self.path, "r") as fin:
file_lines = fin.readlines()
try:
start_index = file_lines.index(start_marker)
end_index = file_lines.index(end_marker)
file_lines = file_lines[:start_index] + file_lines[end_index + 1 :]
except ValueError:
pass
else:
file_lines = []
file_lines.append(start_marker + self.data + "\n" + end_marker)
oldumask = os.umask(0)
fdnum = os.open(self.path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, self.mode)
os.umask(oldumask)
with open(fdnum, "w") as file:
file.writelines(file_lines)
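# Sketch of the effect, assuming a hypothetical marker_name of "scone_managed"
# and the default "# " marker_line_prefix: the managed block is appended to the
# target file (replacing any previous block between the same markers) as
#   # BEGIN scone_managed
#   <data>
#   # END scone_managed
# while the rest of the file's lines are kept as they were.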
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class MakeDirectory(Utensil): class MakeDirectory(Utensil):
path: str path: str
@ -158,8 +117,8 @@ class Stat(Utensil):
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class Chown(Utensil): class Chown(Utensil):
path: str path: str
user: Optional[str] user: str
group: Optional[str] group: str
async def execute(self, channel: Channel, worktop): async def execute(self, channel: Channel, worktop):
shutil.chown(self.path, self.user, self.group) shutil.chown(self.path, self.user, self.group)
@ -178,7 +137,6 @@ class Chmod(Utensil):
class SimpleExec(Utensil): class SimpleExec(Utensil):
args: List[str] args: List[str]
working_dir: str working_dir: str
environment: Optional[Dict[str, str]] = None
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class Result: class Result:
@ -192,7 +150,6 @@ class SimpleExec(Utensil):
stdin=None, stdin=None,
stdout=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE,
env=self.environment,
cwd=self.working_dir cwd=self.working_dir
) )


@ -24,11 +24,6 @@ try:
except ImportError:
asyncpg = None
try:
from mysql import connector as mysql_connector
except ImportError:
mysql_connector = None
from scone.common.chanpro import Channel
from scone.sous import Utensil
from scone.sous.utensils import Worktop
@ -80,56 +75,3 @@ class PostgresTransaction(Utensil):
await queryloop()
finally:
await conn.close()
@attr.s(auto_attribs=True)
class MysqlTransaction(Utensil):
database: str
user: str
unix_socket: bool = False
async def execute(self, channel: Channel, worktop: Worktop) -> None:
if not mysql_connector:
raise RuntimeError("mysql-connector-python is not installed.")
async def queryloop():
while True:
next_input = await channel.recv()
if next_input is None:
return
query, *args = next_input
if query is None:
break
try:
cur.execute(query, tuple(args))
if conn.unread_result:
names = cur.column_names
results = [
dict(zip(names, rectuple)) for rectuple in cur.fetchall()
]
else:
results = []
except mysql_connector.errors.Error:
logger.error(
"Failed query %s with args %r", query, args, exc_info=True
)
await channel.close("Query error")
raise
await channel.send(results)
# TODO(perf): make async
unix_socket = "/var/run/mysqld/mysqld.sock" if self.unix_socket else None
conn = mysql_connector.connect(
database=self.database, user=self.user, unix_socket=unix_socket
)
cur = conn.cursor()
try:
await queryloop()
# autocommit disabled in this mode by default
conn.commit()
finally:
conn.close()


@ -1,14 +1,11 @@
from enum import Enum from typing import Optional
from typing import Dict, List, Optional, Tuple, Union
import attr import attr
try: try:
import docker.errors import docker.errors
from docker.models.containers import Container
except ImportError: except ImportError:
docker = None docker = None
Container = None
from scone.common.chanpro import Channel from scone.common.chanpro import Channel
from scone.sous import Utensil from scone.sous import Utensil
@ -18,101 +15,36 @@ _docker_client_instance = None
def _docker_client(): def _docker_client():
if not docker:
# check docker is actually installed and give a message with the resolution
# when it isn't.
raise RuntimeError(
"You need to install docker from PyPI to use these utensils!"
)
global _docker_client_instance global _docker_client_instance
if not _docker_client_instance: if not _docker_client_instance:
_docker_client_instance = docker.from_env() _docker_client_instance = docker.from_env()
return _docker_client_instance return _docker_client_instance
class ContainerState(Enum):
NOTFOUND = 0
RUNNING = 1
EXITED = 2
RESTARTING = 3
@attr.s(auto_attribs=True)
class DockerContainerState(Utensil):
# Name of the container to check the existence of.
name: str
async def execute(self, channel: Channel, worktop: Worktop):
client = _docker_client()
# this is essentially `docker ps -a`
# TODO(perf) run this in a threaded executor since docker can be slow.
for container in client.containers.list(all=True):
# container: Container
if self.name == container.name:
if container.status == "running":
await channel.send(ContainerState.RUNNING.value)
elif container.status == "exited":
await channel.send(ContainerState.EXITED.value)
elif container.status == "restarting":
await channel.send(ContainerState.RESTARTING.value)
else:
raise ValueError(f"Unknown container status: {container.status}")
break
else:
await channel.send(ContainerState.NOTFOUND.value)
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class DockerContainerRun(Utensil): class DockerContainerRun(Utensil):
# Name of the image to use to create the container.
image: str image: str
# Command to create the container with. Optional. command: str
command: Optional[str]
# Custom name to give the container.
name: str
# Ports to bind inside the container
# {'2222/tcp': ('127.0.0.1', 3333)} will expose port 2222 inside as 3333 outside.
ports: Dict[str, Tuple[str, int]]
# Volumes to mount inside the container.
# Key is either a host path or a container name.
# Value is a dictionary with the keys of:
# bind = path to bind inside the container
# mode = 'rw' or 'ro'
volumes: Dict[str, Dict[str, str]]
# Environment variables
environment: Dict[str, str]
# Restart policy
restart_policy: str
# Capabilities to add
cap_add: List[str]
# Capabilities to drop
cap_drop: List[str]
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class Result: class Result:
name: str name: str
async def execute(self, channel: Channel, worktop: Worktop): async def execute(self, channel: Channel, worktop: Worktop):
restart_policy: Dict[str, Union[int, str]] = { try:
"Name": self.restart_policy,
}
if self.restart_policy == "on-failure":
restart_policy["MaximumRetryCount"] = 5
container = _docker_client().containers.run( container = _docker_client().containers.run(
self.image, self.image, self.command, detach=True
self.command,
detach=True,
name=self.name,
ports=self.ports,
volumes=self.volumes,
environment=self.environment,
restart_policy=restart_policy,
cap_add=self.cap_add,
cap_drop=self.cap_drop,
) )
except docker.errors.ImageNotFound:
# specified image does not exist (or requires login)
await channel.send(None)
return
except docker.errors.APIError:
# the docker server returned an error
await channel.send(None)
return
await channel.send(DockerContainerRun.Result(name=container.name)) await channel.send(DockerContainerRun.Result(name=container.name))


@ -14,7 +14,7 @@
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.
import grp
import pwd
import attr
@ -50,25 +50,3 @@ class GetPasswdEntry(Utensil):
shell=entry.pw_shell,
)
)
@attr.s(auto_attribs=True)
class GetGroupEntry(Utensil):
group_name: str
@attr.s(auto_attribs=True)
class Result:
gid: int
async def execute(self, channel: Channel, worktop: Worktop):
try:
entry = grp.getgrnam(self.group_name)
except KeyError:
await channel.send(None)
return
await channel.send(
GetGroupEntry.Result(
gid=entry.gr_gid,
)
)


@ -45,13 +45,6 @@ async def cli_async() -> int:
parser = ArgumentParser(description="Cook!") parser = ArgumentParser(description="Cook!")
parser.add_argument("hostspec", type=str, help="Sous or group name") parser.add_argument("hostspec", type=str, help="Sous or group name")
parser.add_argument(
"--menu",
"-m",
type=str,
help="Specify a comma-separated list of names of menu to cook. "
"If not specified, all menus will be cooked.",
)
parser.add_argument( parser.add_argument(
"--yes", "--yes",
"-y", "-y",
@ -71,10 +64,6 @@ async def cli_async() -> int:
eprint("Don't appear to be in a head. STOP.") eprint("Don't appear to be in a head. STOP.")
return 1 return 1
menu_subset = None
if argp.menu:
menu_subset = argp.menu.split(",")
head = Head.open(str(cdir)) head = Head.open(str(cdir))
eprint(head.debug_info()) eprint(head.debug_info())
@ -92,10 +81,6 @@ async def cli_async() -> int:
eprint(f"Selected the following souss: {', '.join(hosts)}") eprint(f"Selected the following souss: {', '.join(hosts)}")
# Load variables for the head as well.
head.load_variables(hosts)
head.load_menus(menu_subset, hosts)
eprint("Preparing recipes…") eprint("Preparing recipes…")
prepare = Preparation(head) prepare = Preparation(head)
@ -112,8 +97,22 @@ async def cli_async() -> int:
os.path.join(head.directory, "depcache.sqlite3") os.path.join(head.directory, "depcache.sqlite3")
) )
kitchen = Kitchen(head, dep_cache) # eprint("Checking dependency cache…")
await kitchen.prepare_to_cook() # start_ts = time.monotonic()
# depchecks = await run_dep_checks(head, dep_cache, order)
# end_ts = time.monotonic()
# eprint(f"Checking finished in {end_ts - start_ts:.3f} s.") # TODO show counts
#
# for epoch, items in enumerate(order):
# print(f"----- Course {epoch} -----")
#
# for item in items:
# if isinstance(item, Recipe):
# state = depchecks[item].label.name
# print(f" > recipe ({state}) {item}")
# elif isinstance(item, tuple):
# kind, ident, extra = item
# print(f" - we now have {kind} {ident} {dict(extra)}")
eprint("Ready to cook? [y/N]: ", end="") eprint("Ready to cook? [y/N]: ", end="")
if argp.yes: if argp.yes:
@ -123,11 +122,21 @@ async def cli_async() -> int:
eprint("Stopping.") eprint("Stopping.")
return 101 return 101
kitchen = Kitchen(head, dep_cache)
# for epoch, epoch_items in enumerate(order):
# print(f"Cooking Course {epoch} of {len(order)}")
# await kitchen.run_epoch(
# epoch_items, depchecks, concurrency_limit_per_host=8
# )
#
# for sous in hosts: TODO this is not definitely safe
# await dep_cache.sweep_old(sous)
try: try:
await kitchen.cook_all() await kitchen.cook_all()
finally: finally:
dot_emitter.emit_dot(head.dag, Path(cdir, "dag.9.dot")) dot_emitter.emit_dot(head.dag, Path(cdir, "dag.9.dot"))
await kitchen.close_all_ssh_connections()
return 0 return 0
finally: finally:


@ -92,32 +92,13 @@ class Resource:
though should only be used where necessary and sensible to do so. though should only be used where necessary and sensible to do so.
""" """
# extra_params: Optional[frozendict[str, str]] = None # extra_params: Optional[frozendict[str, str]] = None
extra_params: Optional[frozendict] = attr.ib(default=None) extra_params: Optional[frozendict] = None
def __str__(self) -> str: def __str__(self) -> str:
extra_str = "" if not self.extra_params else f" {self.extra_params!r}" extra_str = "" if not self.extra_params else f" {self.extra_params!r}"
sous_str = "" if not self.sous else f" on {self.sous}" sous_str = "" if not self.sous else f" on {self.sous}"
return f"{self.kind}({self.id}){extra_str}{sous_str}" return f"{self.kind}({self.id}){extra_str}{sous_str}"
@extra_params.validator
def _check_extra_params(self, _attribute, value):
if value is not None and len(value) == 0:
raise ValueError(
"Resources must not contain an empty extra_params dict."
" Use None instead."
)
@staticmethod
def new_lenient(
kind: str, id: str, sous: Optional[str], extra_params: Optional[frozendict]
):
"""
Alternative constructor which will correct an empty extra_params dict.
"""
if extra_params is not None and len(extra_params) == 0:
extra_params = None
return Resource(kind, id, sous, extra_params)
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class ResourceMeta: class ResourceMeta:
@ -156,9 +137,6 @@ class RecipeDag:
self.resource_time: Dict[Resource, int] = dict() self.resource_time: Dict[Resource, int] = dict()
# true for 'only when changed', false for '@when changed'
self.watching: Dict[Recipe, Dict[Vertex, bool]] = defaultdict(dict)
def add(self, vertex: Vertex): def add(self, vertex: Vertex):
self.vertices.add(vertex) self.vertices.add(vertex)
if isinstance(vertex, Recipe): if isinstance(vertex, Recipe):
@ -233,10 +211,3 @@ class RecipeDag:
after_meta.incoming_uncompleted += 1 after_meta.incoming_uncompleted += 1
# TODO if after_meta.state == # TODO if after_meta.state ==
# TODO else ... # TODO else ...
def watches(self, recipe: "Recipe", watching: Vertex, only_when: bool) -> None:
if watching not in self.edges or recipe not in self.edges[watching]:
raise ValueError(
f"{recipe} needs to be after {watching} before it can watch it."
)
self.watching[recipe][watching] = only_when


@ -20,7 +20,7 @@ import logging
import time import time
from copy import deepcopy from copy import deepcopy
from hashlib import sha256 from hashlib import sha256
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union from typing import TYPE_CHECKING, Any, Dict, Optional, Tuple, Union
import aiosqlite import aiosqlite
import attr import attr
@ -75,15 +75,12 @@ def paramhash_recipe(recipe: "Recipe") -> str:
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class DependencyBook: class DependencyBook:
provided: Dict[Resource, int] = attr.attrib(factory=dict) provided: Dict[Resource, int] = dict()
watching: Dict[Resource, int] = attr.attrib(factory=dict) watching: Dict[Resource, int] = dict()
last_changed: int = 0 last_changed: int = 0
cache_data: Dict[str, Any] = attr.attrib(factory=dict) cache_data: Dict[str, Any] = dict()
ignored: bool = False ignored: bool = False
var_list: List[str] = attr.attrib(factory=list)
varhash: str = ""
# TODO(performance, feature): track more in-depth details, perhaps as a # TODO(performance, feature): track more in-depth details, perhaps as a
# per-resource cache thing, so that we can track the info needed to know # per-resource cache thing, so that we can track the info needed to know
# if it changed...? # if it changed...?
@ -95,12 +92,10 @@ class DependencyBook:
"last_changed": self.last_changed, "last_changed": self.last_changed,
"cache_data": self.cache_data, "cache_data": self.cache_data,
"ignored": self.ignored, "ignored": self.ignored,
"var_list": self.var_list,
"varhash": self.varhash,
} }
@staticmethod @staticmethod
def _structure(dictionary: dict, _class: Any) -> "DependencyBook": def _structure(dictionary: dict) -> "DependencyBook":
provided = {cattr.structure(k, Resource): v for k, v in dictionary["provided"]} provided = {cattr.structure(k, Resource): v for k, v in dictionary["provided"]}
watching = {cattr.structure(k, Resource): v for k, v in dictionary["watching"]} watching = {cattr.structure(k, Resource): v for k, v in dictionary["watching"]}
@ -110,8 +105,6 @@ class DependencyBook:
last_changed=dictionary["last_changed"], last_changed=dictionary["last_changed"],
cache_data=dictionary["cache_data"], cache_data=dictionary["cache_data"],
ignored=dictionary["ignored"], ignored=dictionary["ignored"],
var_list=dictionary["var_list"],
varhash=dictionary["varhash"],
) )
@ -125,43 +118,27 @@ cattr.global_converter.register_structure_hook(
class DependencyTracker: class DependencyTracker:
def __init__(self, book: DependencyBook, dag: "RecipeDag", recipe: "Recipe"): def __init__(self, book: DependencyBook, dag: "RecipeDag", recipe: "Recipe"):
self._book: DependencyBook = book self.book: DependencyBook = book
self._dag: "RecipeDag" = dag self._dag: "RecipeDag" = dag
self._recipe: "Recipe" = recipe self._recipe: "Recipe" = recipe
self._time: int = int(time.time() * 1000) self._time: int = int(time.time() * 1000)
self._vars: Dict[str, Any] = dict()
def build_book(self) -> DependencyBook:
self._book.varhash = hash_dict(self._vars)
self._book.var_list = sorted(self._vars.keys())
return self._book
def watch(self, resource: Resource) -> None: def watch(self, resource: Resource) -> None:
try: # XXX self.book.watching[resource] = self._dag.resource_time[resource]
self._book.watching[resource] = self._dag.resource_time[resource] self.book.watching[resource] = -42
except KeyError as ke:
raise RuntimeError(
f"Can't watch {resource!r} because it hasn't been provided (yet)!"
) from ke
def provide(self, resource: Resource, time: Optional[int] = None) -> None: def provide(self, resource: Resource, time: Optional[int] = None) -> None:
if time is None: if time is None:
time = self._time time = self._time
# We use the maximum time because multiple recipes may provide something self._dag.resource_time[resource] = time
# and we should be careful to define a consistent behaviour in this case
max_provision_time = max(time, self._dag.resource_time.get(resource, -1))
self._dag.resource_time[resource] = max_provision_time
self._book.provided[resource] = max_provision_time
def ignore(self) -> None: def ignore(self) -> None:
self._book.ignored = True self.book.ignored = True
def register_variable( def register_variable(self, variable: str, value: Union[dict, str, int]):
self, variable: str, value: Union[dict, str, int, float, bool] # self._vars[variable] = value
): # TODO(implement)
# store a copy and we'll read it later logger.critical("not implemented: register var %s", variable)
self._vars[variable] = value
def register_fridge_file(self, desugared_path: str): def register_fridge_file(self, desugared_path: str):
# TODO this is not complete # TODO this is not complete
@ -196,30 +173,7 @@ class DependencyVarProxy:
self._vars = vars self._vars = vars
self._tracker = tracker self._tracker = tracker
def __str__(self): def raw_(self) -> Dict[str, Any]:
"""
Allows use a top-level stringy variable directly in a template.
"""
if not self._current_path_prefix:
return repr(self)
raw_value = self._vars.get_dotted(self._current_path_prefix)
if (
isinstance(raw_value, str)
or isinstance(raw_value, int)
or isinstance(raw_value, float)
or isinstance(raw_value, bool)
):
self._tracker.register_variable(self._current_path_prefix, raw_value)
return str(raw_value)
else:
raise ValueError(
f"Trying to stringify a variable proxy which contains {repr(self)}"
)
def raw_(self) -> Union[Dict[str, Any], List[Any], int, str, bool]:
if not self._current_path_prefix: if not self._current_path_prefix:
raw_dict = self._vars.toplevel() raw_dict = self._vars.toplevel()
else: else:
@ -240,19 +194,6 @@ class DependencyVarProxy:
self._tracker.register_variable(dotted_path, raw_value) self._tracker.register_variable(dotted_path, raw_value)
return raw_value return raw_value
def __iter__(self):
raw = self.raw_()
if not isinstance(raw, dict) and not isinstance(raw, list):
raise TypeError("Trying to iterate over non-iterable")
return iter(raw)
def __contains__(self, item):
raw = self.raw_()
if isinstance(raw, list) or isinstance(raw, str) or isinstance(raw, dict):
return item in raw
else:
raise TypeError("Not a container")
class DependencyCache: class DependencyCache:
def __init__(self): def __init__(self):
@ -301,10 +242,7 @@ class DependencyCache:
AND paramhash = ? AND paramhash = ?
LIMIT 1 LIMIT 1
""", """,
( (recipe_name_getter(recipe.__class__), paramhash,),
recipe_name_getter(recipe.__class__),
paramhash,
),
) )
rows = list(rows) rows = list(rows)
if not rows: if not rows:


@ -1,16 +0,0 @@
# Copyright 2020, Olivier 'reivilibre'.
#
# This file is part of Scone.
#
# Scone is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scone is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>.

View File

@ -19,7 +19,7 @@ RecipeOrSubBlock:
SubBlock[ws=' \t']: SubBlock[ws=' \t']:
unique_id=IdString '{' human=/.*$/ /\n/+ unique_id=ID '{' human=/.*$/ /\n/+
block=Block block=Block
'}' /\n/+ '}' /\n/+
; ;
@ -27,20 +27,19 @@ SubBlock[ws=' \t']:
Directive: Directive:
UserDirective | SousDirective | ForDirective | ImportDirective | UserDirective | SousDirective | ForDirective | ImportDirective |
RecipeEdgeDirective | ResourceEdgeDirective | ListenEdgeDirective | RecipeEdgeDirective | ResourceEdgeDirective | ListenEdgeDirective
IfSetDirective | IfCondDirective
; ;
UserDirective[ws=' \t']: UserDirective[ws=' \t']:
'@user' '=' (user=IdString | user=VarInterpolationString) /\n/+ '@user' '=' user=ID /\n/+
; ;
SousDirective[ws=' \t']: SousDirective[ws=' \t']:
'@sous' '=' sous=IdString /\n/+ '@sous' '=' sous=ID /\n/+
; ;
ImportDirective[ws=' \t']: ImportDirective[ws=' \t']:
'@import' importee=IdString /\n/+ '@import' importee=ID /\n/+
; ;
@ -54,14 +53,6 @@ ForDirective[ws=' \t']:
) )
; ;
IfSetDirective[ws=' \t']:
'@ifSet' variable=DottedIdString /\n/+
;
IfCondDirective[ws=' \t']:
'@if' variable=DottedIdString operator='=' other_value=ValueExpr /\n/+
;
ResourceEdgeDirectiveKind: ResourceEdgeDirectiveKind:
'@needs' | '@wants' | '@provides' '@needs' | '@wants' | '@provides'
; ;
@ -78,7 +69,7 @@ RecipeEdgeDirectiveKind:
RecipeEdgeDirective[ws=' \t']: RecipeEdgeDirective[ws=' \t']:
kind=RecipeEdgeDirectiveKind kind=RecipeEdgeDirectiveKind
':' id=IdString ':' id=ID
// TODO 'on other sous' ? // TODO 'on other sous' ?
/\n/+ /\n/+
; ;
@ -89,15 +80,15 @@ ListenEdgeDirectiveKind:
ListenEdgeDirective[ws=' \t']: ListenEdgeDirective[ws=' \t']:
kind=ListenEdgeDirectiveKind kind=ListenEdgeDirectiveKind
((':' recipe_id=IdString) | resource=Resource) (recipe_id=ID | resource=Resource)
'changes' /\n/+ 'changes'
; ;
Resource: Resource:
type=IdString '(' (primary=UnquotedString | primary=QuotedString) ')' type=ID '(' (primary=UnquotedString | primary=QuotedString) ')'
(extra_params=BraceDict)? (extra_params=BraceDict)?
('on' sous=IdString)? ('on' sous=ID)?
; ;
@ -142,21 +133,13 @@ QuotedString:
; ;
UnquotedString: UnquotedString:
value=/[^]\s\n,"(){}[0-9=]([^]\n,"(){}[=]*[^]\s\n,"(){}[=])?/ value=/[^\s\n,"()0-9]([^\n,"()]*[^\s\n,"()])?/
; ;
DottedIdString: DottedIdString:
/[a-zA-Z_-][a-zA-Z0-9_\.-]*/ /[a-zA-Z_-][a-zA-Z0-9_\.-]*/
; ;
IdString:
/[a-zA-Z_-][a-zA-Z0-9_-]*/
;
VarInterpolationString:
'${' DottedIdString '}'
;
Integer: Integer:
value=INT value=INT
; ;
@ -171,12 +154,12 @@ BracketList[ws=' \t\n']:
']' ']'
; ;
BraceDict[ws=' \t\n']: BraceDict[ws=' \t']:
'{' '{'
pairs*=DictPair[','] pairs*=DictPair[',']
'}' '}'
; ;
DictPair[ws=' \t\n']: DictPair:
(key=KeyExpr) '=' (value=ValueExpr) (key=KeyExpr) '=' (value=ValueExpr)
; ;
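
The develop side of this grammar adds @ifSet and @if conditional directives (plus ${var} interpolation and a dedicated IdString rule). As a rough illustration of the semantics those directives imply, evaluating them against a variable mapping might look like the following (hypothetical helper names, not the project's implementation):

    from typing import Any, Mapping

    def get_dotted(variables: Mapping[str, Any], dotted: str) -> Any:
        current: Any = variables
        for part in dotted.split("."):
            current = current[part]
        return current

    def if_set(variables: Mapping[str, Any], dotted: str) -> bool:
        # @ifSet some.variable
        try:
            get_dotted(variables, dotted)
            return True
        except (KeyError, TypeError):
            return False

    def if_equals(variables: Mapping[str, Any], dotted: str, other: Any) -> bool:
        # @if some.variable = value
        return if_set(variables, dotted) and get_dotted(variables, dotted) == other

    variables = {"backups": {"enabled": True, "target": "sallie"}}
    assert if_set(variables, "backups.target")
    assert if_equals(variables, "backups.enabled", True)
    assert not if_set(variables, "backups.retention")
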

View File

@ -21,7 +21,7 @@ import re
import sys import sys
from os import path from os import path
from pathlib import Path from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, cast from typing import Any, Dict, Iterable, List, Optional, Tuple, cast
import toml import toml
from nacl.encoding import URLSafeBase64Encoder from nacl.encoding import URLSafeBase64Encoder
@ -37,8 +37,6 @@ from scone.head.variables import Variables, merge_right_into_left_inplace
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
SPECIAL_HEAD_SOUS = "head"
class Head: class Head:
def __init__( def __init__(
@ -54,8 +52,6 @@ class Head:
self.recipe_loader = recipe_loader self.recipe_loader = recipe_loader
self.dag = RecipeDag() self.dag = RecipeDag()
self.souss = sous self.souss = sous
# Special override: head sous.
self.souss[SPECIAL_HEAD_SOUS] = {"user": ""}
self.groups = groups self.groups = groups
self.secret_access = secret_access self.secret_access = secret_access
self.variables: Dict[str, Variables] = dict() self.variables: Dict[str, Variables] = dict()
@ -83,12 +79,13 @@ class Head:
sous = head_data.get("sous", dict()) sous = head_data.get("sous", dict())
groups = head_data.get("group", dict()) groups = head_data.get("group", dict())
groups["all_plus_head"] = list(sous.keys()) groups["all"] = list(sous.keys())
groups["all"] = list(sous.keys() - "head")
pools = Pools() pools = Pools()
head = Head(directory, recipe_loader, sous, groups, secret_access, pools) head = Head(directory, recipe_loader, sous, groups, secret_access, pools)
head._load_variables()
head._load_menus()
return head return head
def _preload_variables(self, who_for: str) -> Tuple[dict, dict]: def _preload_variables(self, who_for: str) -> Tuple[dict, dict]:
@ -96,10 +93,6 @@ class Head:
out_chilled: Dict[str, Any] = {} out_chilled: Dict[str, Any] = {}
vardir = Path(self.directory, "vars", who_for) vardir = Path(self.directory, "vars", who_for)
# TODO(feature): is this needed?
# if not vardir.exists():
# return out_chilled, out_frozen
logger.debug("preloading vars for %s in %s", who_for, str(vardir)) logger.debug("preloading vars for %s in %s", who_for, str(vardir))
for file in vardir.glob("*.vf.toml"): for file in vardir.glob("*.vf.toml"):
@ -138,16 +131,12 @@ class Head:
return out_chilled, out_frozen return out_chilled, out_frozen
def load_variables(self, host_subset: Optional[Set[str]]): def _load_variables(self):
preload: Dict[str, Tuple[dict, dict]] = dict() preload: Dict[str, Tuple[dict, dict]] = dict()
for who_name in itertools.chain(self.souss, self.groups): for who_name in itertools.chain(self.souss, self.groups):
# TODO(performance): don't preload vars for deselected souss and
# groups
preload[who_name] = self._preload_variables(who_name) preload[who_name] = self._preload_variables(who_name)
for sous_name in self.souss: for sous_name in self.souss:
if host_subset and sous_name not in host_subset:
continue
order = ["all"] order = ["all"]
order += [ order += [
group group
@ -170,14 +159,10 @@ class Head:
self.variables[sous_name] = sous_vars self.variables[sous_name] = sous_vars
def load_menus(self, subset: Optional[List[str]], host_subset: Set[str]): def _load_menus(self):
loader = MenuLoader(Path(self.directory, "menu"), self) loader = MenuLoader(Path(self.directory, "menu"), self)
if subset:
for unit in subset:
loader.load(unit)
else:
loader.load_menus_in_dir() loader.load_menus_in_dir()
loader.dagify_all(host_subset) loader.dagify_all()
# TODO remove # TODO remove
# def _construct_hostmenu_for( # def _construct_hostmenu_for(
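
For orientation: the develop side introduces a special 'head' sous that belongs to all_plus_head but not to the plain all group. A minimal sketch of that relationship (set-based and simplified, not the repository's exact code):

    SPECIAL_HEAD_SOUS = "head"

    sous = {"sallie": {"user": "scone"}, "mercury": {"user": "scone"}}
    groups = {}

    # the head itself becomes addressable as a sous, with an empty user
    sous[SPECIAL_HEAD_SOUS] = {"user": ""}

    groups["all_plus_head"] = sorted(sous.keys())
    groups["all"] = sorted(set(sous.keys()) - {SPECIAL_HEAD_SOUS})

    assert groups["all"] == ["mercury", "sallie"]
    assert SPECIAL_HEAD_SOUS in groups["all_plus_head"]
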

View File

@ -17,14 +17,9 @@
import asyncio import asyncio
import logging import logging
import os
import signal
import time
import traceback
from asyncio import Future, Queue from asyncio import Future, Queue
from collections import deque from collections import deque
from contextvars import ContextVar from contextvars import ContextVar
from pathlib import Path
from typing import Any, Deque, Dict, Optional, Tuple, Type, TypeVar from typing import Any, Deque, Dict, Optional, Tuple, Type, TypeVar
import cattr import cattr
@ -38,9 +33,8 @@ from scone.head.dependency_tracking import (
DependencyBook, DependencyBook,
DependencyCache, DependencyCache,
DependencyTracker, DependencyTracker,
hash_dict,
) )
from scone.head.head import SPECIAL_HEAD_SOUS, Head from scone.head.head import Head
from scone.head.recipe import Recipe from scone.head.recipe import Recipe
from scone.sous import utensil_namer from scone.sous import utensil_namer
from scone.sous.utensils import Utensil from scone.sous.utensils import Utensil
@ -72,7 +66,7 @@ class Preparation:
if sous == "(self)": if sous == "(self)":
sous = self._current_recipe.recipe_context.sous sous = self._current_recipe.recipe_context.sous
resource = Resource.new_lenient( resource = Resource(
requirement, identifier, sous, frozendict(extra_identifiers) requirement, identifier, sous, frozendict(extra_identifiers)
) )
@ -93,7 +87,7 @@ class Preparation:
if sous == "(self)": if sous == "(self)":
sous = self._current_recipe.recipe_context.sous sous = self._current_recipe.recipe_context.sous
resource = Resource.new_lenient( resource = Resource(
requirement, identifier, sous, frozendict(extra_identifiers) requirement, identifier, sous, frozendict(extra_identifiers)
) )
@ -130,9 +124,7 @@ class Preparation:
class Kitchen: class Kitchen:
def __init__( def __init__(
self, self, head: "Head", dependency_store: DependencyCache,
head: "Head",
dependency_store: DependencyCache,
): ):
self._chanproheads: Dict[Tuple[str, str], Future[ChanProHead]] = dict() self._chanproheads: Dict[Tuple[str, str], Future[ChanProHead]] = dict()
self._dependency_store = dependency_store self._dependency_store = dependency_store
@ -141,7 +133,6 @@ class Kitchen:
self.last_updated_ats: Dict[Resource, int] = dict() self.last_updated_ats: Dict[Resource, int] = dict()
self._cookable: Queue[Optional[Vertex]] = Queue() self._cookable: Queue[Optional[Vertex]] = Queue()
self._sleeper_slots: int = 0 self._sleeper_slots: int = 0
self._kitchen_time: int = int(1000 * time.time())
def get_dependency_tracker(self): def get_dependency_tracker(self):
return self._dependency_trackers[current_recipe.get()] return self._dependency_trackers[current_recipe.get()]
@ -167,17 +158,18 @@ class Kitchen:
return ChanProHead(cp, root) return ChanProHead(cp, root)
if host == SPECIAL_HEAD_SOUS:
raise ValueError("Can't connect to special 'head' sous over SSH")
hostuser = (host, user) hostuser = (host, user)
if hostuser not in self._chanproheads: if hostuser not in self._chanproheads:
self._chanproheads[hostuser] = asyncio.create_task(new_conn()) self._chanproheads[hostuser] = asyncio.create_task(new_conn())
return await self._chanproheads[hostuser] return await self._chanproheads[hostuser]
async def prepare_to_cook(self): async def cook_all(self):
await self._emit_fridge_resources() # TODO fridge emitter
num_workers = 8
self._sleeper_slots = num_workers - 1
for vertex in self.head.dag.vertices: for vertex in self.head.dag.vertices:
if isinstance(vertex, Recipe): if isinstance(vertex, Recipe):
@ -200,108 +192,72 @@ class Kitchen:
) )
self._cookable.put_nowait(vertex) self._cookable.put_nowait(vertex)
async def cook_all(self):
num_workers = 8
self._sleeper_slots = num_workers - 1
workers = [] workers = []
workers_routines = []
for _ in range(num_workers): for _ in range(num_workers):
worker = CookingWorker(self) workers.append(self._cooking_worker())
workers.append(worker)
workers_routines.append(worker.start())
# register handler await asyncio.gather(*workers, return_exceptions=False)
def signal_handler_progress(_1, _2):
eprint("----- SIGUSR1 Progress Report -----")
for i, worker in enumerate(workers):
eprint(f"Worker {i} ({worker.state}):")
eprint(f" recipe: {worker.current_recipe}")
eprint("-----------------------------------")
signal.signal(signal.SIGUSR1, signal_handler_progress) async def _cooking_worker(self):
dag = self.head.dag
while True:
if self._sleeper_slots <= 0 and self._cookable.empty():
self._sleeper_slots -= 1
self._cookable.put_nowait(None)
break
await asyncio.gather(*workers_routines, return_exceptions=False) self._sleeper_slots -= 1
# unregister handler
signal.signal(signal.SIGUSR1, signal.SIG_IGN)
async def close_all_ssh_connections(self):
connections = self._chanproheads
self._chanproheads = dict()
for _key_tuple, connection_future in connections.items():
try: try:
connection = await connection_future next_job = await self._cookable.get()
await connection.close() finally:
except Exception: self._sleeper_slots += 1
traceback.print_exc()
async def _should_skip( if next_job is None:
self, recipe: Recipe
) -> Tuple[Optional[DependencyBook], bool]:
"""
:param recipe: recipe to inquire about
:return: dep book, or None if there wasn't one needed to compute this
and true if the recipe should be skipped, false otherwise.
"""
only_when_flag_set = False
if recipe in self.head.dag.watching:
for watching, only_when in self.head.dag.watching[recipe].items():
if isinstance(watching, Resource):
# only recipe watches are accepted here.
# resource watches are handled by adding them to the watchlist
# in the dependency book
continue continue
assert isinstance(watching, Recipe)
only_when_flag_set |= only_when
watch_rmeta = self.head.dag.recipe_meta[watching]
if watch_rmeta.state == RecipeState.COOKED:
# underlying recipe changed. Ideally want a new changed state.
# TODO(design): have a 'changed' state for recipes?
return None, False
if only_when_flag_set: if isinstance(next_job, Recipe):
# TODO(design) is it sensible to skip this here? What if we need to meta = dag.recipe_meta[next_job]
# provide something? I suppose it's not guaranteed to be provided.
return None, True
inquiry = await self._dependency_store.inquire(recipe) # TODO try to deduplicate
if inquiry is None: meta.state = RecipeState.BEING_COOKED
return None, False current_recipe.set(next_job)
_id, prev_book = inquiry eprint(f"cooking {next_job}")
self._dependency_trackers[next_job] = DependencyTracker(
# ignored books are not valid... DependencyBook(), dag, next_job
if prev_book.ignored: )
return prev_book, False
# compute and compare the var hash...
sous_vars = recipe.recipe_context.variables
vars_to_hash = {}
for var in prev_book.var_list:
try: try:
vars_to_hash[var] = sous_vars.get_dotted(var) await next_job.cook(self)
except KeyError: except Exception as e:
# variable missing meta.state = RecipeState.FAILED
return prev_book, False raise RuntimeError(f"Recipe {next_job} failed!") from e
my_varhash = hash_dict(vars_to_hash) eprint(f"cooked {next_job}")
if prev_book.varhash != my_varhash: # TODO cook
return prev_book, False # TODO store depbook
await self._store_dependency(next_job)
meta.state = RecipeState.COOKED
elif isinstance(next_job, Resource):
eprint(f"have {next_job}")
pass
# compare watched resources... for edge in dag.edges[next_job]:
for resource, last_update_time in prev_book.watching.items(): logger.debug("updating edge: %s%s", next_job, edge)
res_time = self.head.dag.resource_time.get(resource) if isinstance(edge, Recipe):
if res_time is None: rec_meta = dag.recipe_meta[edge]
# suggests something has changed in a significant way... rec_meta.incoming_uncompleted -= 1
return prev_book, False logger.debug("has %d incoming", rec_meta.incoming_uncompleted)
if (
if res_time != last_update_time: rec_meta.incoming_uncompleted == 0
# recipe is out of date and rec_meta.state == RecipeState.PENDING
return prev_book, False ):
rec_meta.state = RecipeState.COOKABLE
return prev_book, True self._cookable.put_nowait(edge)
elif isinstance(edge, Resource):
res_meta = dag.resource_meta[edge]
res_meta.incoming_uncompleted -= 1
logger.debug("has %d incoming", res_meta.incoming_uncompleted)
if res_meta.incoming_uncompleted == 0 and not res_meta.completed:
res_meta.completed = True
self._cookable.put_nowait(edge)
# async def run_epoch( # async def run_epoch(
# self, # self,
@ -372,7 +328,7 @@ class Kitchen:
dependency_tracker = self._dependency_trackers.pop(recipe, None) dependency_tracker = self._dependency_trackers.pop(recipe, None)
if not dependency_tracker: if not dependency_tracker:
raise KeyError(f"Recipe {recipe} has not been tracked.") raise KeyError(f"Recipe {recipe} has not been tracked.")
depbook = dependency_tracker.build_book() depbook = dependency_tracker.book
if depbook: if depbook:
await self._dependency_store.register(recipe, depbook) await self._dependency_store.register(recipe, depbook)
@ -384,25 +340,6 @@ class Kitchen:
context = recipe.recipe_context context = recipe.recipe_context
return Resource(kind, id, context.sous, extra_params) return Resource(kind, id, context.sous, extra_params)
async def _emit_fridge_resources(self):
from scone.default.steps.fridge_steps import get_fridge_dirs
for fridge_dir in get_fridge_dirs(self.head):
num_prefix_parts = len(fridge_dir.parts)
for root, _dirs, files in os.walk(fridge_dir):
for file in files:
full_path = Path(root, file)
parts = full_path.parts
if parts[0:num_prefix_parts] != fridge_dir.parts:
raise RuntimeError(
f"{parts[0:num_prefix_parts]!r} != {fridge_dir.parts!r}"
)
fridge_relative_path = "/".join(parts[num_prefix_parts:])
fridge_res = Resource("fridge", fridge_relative_path, None)
stat = os.stat(full_path)
mtime = int(stat.st_mtime_ns // 1e6)
self.head.dag.resource_time[fridge_res] = mtime
# #
# @attr.s(auto_attribs=True) # @attr.s(auto_attribs=True)
@ -447,100 +384,3 @@ class Kitchen:
# await asyncio.gather( # await asyncio.gather(
# *[asyncio.create_task(cooker()) for _ in range(concurrency_limit)] # *[asyncio.create_task(cooker()) for _ in range(concurrency_limit)]
# ) # )
class CookingWorker:
def __init__(self, kitchen):
self.kitchen = kitchen
self.state = "not started"
self.current_recipe = None
async def start(self):
self.state = "started"
dag = self.kitchen.head.dag
while True:
if self.kitchen._sleeper_slots <= 0 and self.kitchen._cookable.empty():
self.kitchen._sleeper_slots -= 1
self.kitchen._cookable.put_nowait(None)
self.state = "ended"
break
self.kitchen._sleeper_slots -= 1
try:
self.state = "polling"
next_job = await self.kitchen._cookable.get()
finally:
self.state = "after polling"
self.kitchen._sleeper_slots += 1
if next_job is None:
continue
if isinstance(next_job, Recipe):
meta = dag.recipe_meta[next_job]
last_book, should_skip = await self.kitchen._should_skip(next_job)
if should_skip and last_book:
# logger.debug("skipping %s", next_job)
meta.state = RecipeState.SKIPPED
# provide stuff that it provided last time
for res, last_update_time in last_book.provided.items():
# logger.debug("skip-providing %s", res)
dag.resource_time[res] = max(
last_update_time, dag.resource_time.get(res, -1)
)
else:
meta.state = RecipeState.BEING_COOKED
current_recipe.set(next_job)
eprint(f"cooking {next_job}")
tracker = DependencyTracker(DependencyBook(), dag, next_job)
self.kitchen._dependency_trackers[next_job] = tracker
try:
self.state = "cooking"
self.current_recipe = next_job
await next_job.cook(self.kitchen)
self.state = "cooked"
# provide stuff
for outgoing in dag.edges[next_job]:
if not isinstance(outgoing, Resource):
continue
# logger.debug("providing %s", outgoing)
tracker.provide(outgoing)
except Exception as e:
meta.state = RecipeState.FAILED
raise RuntimeError(f"Recipe {next_job} failed!") from e
eprint(f"cooked {next_job}")
if next_job in self.kitchen.head.dag.watching:
for watching, only_when in self.kitchen.head.dag.watching[
next_job
].items():
if isinstance(watching, Resource):
# recipe watches are handled when loading the
# dependency book.
tracker.watch(watching)
await self.kitchen._store_dependency(next_job)
meta.state = RecipeState.COOKED
elif isinstance(next_job, Resource):
eprint(f"have {next_job}")
pass
for edge in dag.edges[next_job]:
# logger.debug("updating edge: %s → %s", next_job, edge)
if isinstance(edge, Recipe):
rec_meta = dag.recipe_meta[edge]
rec_meta.incoming_uncompleted -= 1
if (
rec_meta.incoming_uncompleted == 0
and rec_meta.state == RecipeState.PENDING
):
rec_meta.state = RecipeState.COOKABLE
self.kitchen._cookable.put_nowait(edge)
elif isinstance(edge, Resource):
res_meta = dag.resource_meta[edge]
res_meta.incoming_uncompleted -= 1
if res_meta.incoming_uncompleted == 0 and not res_meta.completed:
res_meta.completed = True
self.kitchen._cookable.put_nowait(edge)
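
The CookingWorker loop above drains a shared queue with a fixed pool of workers; a worker that finds the queue empty while every other worker is idle pushes a None sentinel so the rest can shut down too. A much-simplified, self-contained sketch of that shutdown pattern (the "cooking" is replaced by a trivial computation):

    import asyncio

    async def worker(queue: asyncio.Queue, slots: list, results: list) -> None:
        while True:
            if slots[0] <= 0 and queue.empty():
                slots[0] -= 1
                queue.put_nowait(None)   # wake any remaining sleepers
                break
            slots[0] -= 1
            try:
                job = await queue.get()
            finally:
                slots[0] += 1
            if job is None:
                continue                 # loop round and observe shutdown
            results.append(job * 2)      # stand-in for cooking a recipe

    async def main() -> None:
        queue: asyncio.Queue = asyncio.Queue()
        for i in range(5):
            queue.put_nowait(i)
        num_workers = 3
        slots = [num_workers - 1]        # mutable "sleeper slots" counter
        results: list = []
        await asyncio.gather(*(worker(queue, slots, results) for _ in range(num_workers)))
        assert sorted(results) == [0, 2, 4, 6, 8]

    asyncio.run(main())
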

View File

@ -20,7 +20,7 @@ import os
import typing import typing
from collections import defaultdict, deque from collections import defaultdict, deque
from pathlib import Path from pathlib import Path
from typing import Any, Deque, Dict, Iterable, List, Optional, Set, Tuple, Union from typing import Any, Deque, Dict, Iterable, List, Optional, Tuple, Union
import attr import attr
import textx import textx
@ -45,13 +45,8 @@ scoml_classes = scoml_grammar.namespaces["scoml"]
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
class ControlDirective:
def iter_over(self, vars: Variables) -> Iterable[Variables]:
raise NotImplementedError("Abstract.")
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class ForDirective(ControlDirective): class ForDirective:
""" """
For loop_variable in collection For loop_variable in collection
""" """
@ -62,59 +57,6 @@ class ForDirective(ControlDirective):
# List of literals or str for a variable (by name) # List of literals or str for a variable (by name)
collection: Union[str, List[Any]] collection: Union[str, List[Any]]
def iter_over(self, vars: Variables):
to_iter = self.collection
if isinstance(to_iter, str):
to_iter = vars.get_dotted(to_iter)
if not isinstance(to_iter, list):
raise ValueError(f"to_iter = {to_iter!r} not a list")
for item in to_iter:
new_vars = Variables(vars)
new_vars.set_dotted(self.loop_variable, item)
yield new_vars
@attr.s(auto_attribs=True)
class IfDirective(ControlDirective):
def condition_true(self, vars: Variables) -> bool:
return False
def iter_over(self, vars: Variables) -> Iterable[Variables]:
if self.condition_true(vars):
yield vars
else:
yield from ()
@attr.s(auto_attribs=True)
class IfSetDirective(IfDirective):
# Name of the variable to check for existence.
check_variable: str
def condition_true(self, vars: Variables) -> bool:
return vars.has_dotted(self.check_variable)
@attr.s(auto_attribs=True)
class IfCondDirective(IfDirective):
# Name of the variable
variable: str
# The operator that is used
operator: str
# The other value to check for equality against
other_value: str
def condition_true(self, vars: Variables) -> bool:
if self.operator == "=":
value = vars.get_dotted(self.variable)
return value == self.other_value
else:
raise NotImplementedError(f"operator {self.operator} not understood.")
@attr.s(auto_attribs=True) @attr.s(auto_attribs=True)
class RecipeEdgeDirective: class RecipeEdgeDirective:
@ -132,14 +74,6 @@ class ResourceEdgeDirective:
resource: Resource resource: Resource
@attr.s(auto_attribs=True)
class ListenEdgeDirective:
# "when" or "only when"
kind: str
recipe_or_resource: Union[str, Resource]
@attr.s(auto_attribs=True, eq=False) @attr.s(auto_attribs=True, eq=False)
class MenuBlock: class MenuBlock:
id: Optional[None] id: Optional[None]
@ -152,7 +86,7 @@ class MenuBlock:
user_directive: Optional[str] = None user_directive: Optional[str] = None
sous_directive: Optional[str] = None sous_directive: Optional[str] = None
control_directives: List[ControlDirective] = attr.ib(factory=list) for_directives: List[ForDirective] = attr.ib(factory=list)
import_directives: List[str] = attr.ib(factory=list) import_directives: List[str] = attr.ib(factory=list)
recipe_edges: List[RecipeEdgeDirective] = attr.ib(factory=list) recipe_edges: List[RecipeEdgeDirective] = attr.ib(factory=list)
resource_edges: List[ResourceEdgeDirective] = attr.ib(factory=list) resource_edges: List[ResourceEdgeDirective] = attr.ib(factory=list)
@ -172,13 +106,12 @@ class MenuRecipe:
user_directive: Optional[str] = None user_directive: Optional[str] = None
sous_directive: Optional[str] = None sous_directive: Optional[str] = None
control_directives: List[ControlDirective] = attr.ib(factory=list) for_directives: List[ForDirective] = attr.ib(factory=list)
recipe_edges: List[RecipeEdgeDirective] = attr.ib(factory=list) recipe_edges: List[RecipeEdgeDirective] = attr.ib(factory=list)
resource_edges: List[ResourceEdgeDirective] = attr.ib(factory=list) resource_edges: List[ResourceEdgeDirective] = attr.ib(factory=list)
listen_edges: List[ListenEdgeDirective] = attr.ib(factory=list)
def convert_textx_value(txvalue) -> Union[list, str, int, bool, dict]: def convert_textx_value(txvalue) -> Any:
if isinstance(txvalue, scoml_classes["NaturalList"]): if isinstance(txvalue, scoml_classes["NaturalList"]):
return [convert_textx_value(element) for element in txvalue.elements] return [convert_textx_value(element) for element in txvalue.elements]
elif ( elif (
@ -194,7 +127,6 @@ def convert_textx_value(txvalue) -> Union[list, str, int, bool, dict]:
result = dict() result = dict()
for pair in txvalue.pairs: for pair in txvalue.pairs:
result[convert_textx_value(pair.key)] = convert_textx_value(pair.value) result[convert_textx_value(pair.key)] = convert_textx_value(pair.value)
return result
else: else:
raise ValueError(f"Unknown SCOML value: {txvalue}") raise ValueError(f"Unknown SCOML value: {txvalue}")
@ -222,7 +154,7 @@ def convert_textx_recipe(txrecipe_or_subblock, parent: Optional[MenuBlock]):
if isinstance(directive, scoml_classes["UserDirective"]): if isinstance(directive, scoml_classes["UserDirective"]):
recipe.user_directive = directive.user recipe.user_directive = directive.user
elif isinstance(directive, scoml_classes["SousDirective"]): elif isinstance(directive, scoml_classes["SousDirective"]):
recipe.sous_directive = directive.sous recipe.user_directive = directive.sous
elif isinstance(directive, scoml_classes["ResourceEdgeDirective"]): elif isinstance(directive, scoml_classes["ResourceEdgeDirective"]):
recipe.resource_edges.append( recipe.resource_edges.append(
ResourceEdgeDirective( ResourceEdgeDirective(
@ -233,32 +165,6 @@ def convert_textx_recipe(txrecipe_or_subblock, parent: Optional[MenuBlock]):
recipe.recipe_edges.append( recipe.recipe_edges.append(
RecipeEdgeDirective(directive.kind[1:], directive.id) RecipeEdgeDirective(directive.kind[1:], directive.id)
) )
elif isinstance(directive, scoml_classes["ListenEdgeDirective"]):
recipe.listen_edges.append(
ListenEdgeDirective(
directive.kind[1:],
directive.recipe_id
or convert_textx_resource(directive.resource),
)
)
elif isinstance(directive, scoml_classes["ForDirective"]):
for_list = directive.collection or convert_textx_value(directive.list)
assert isinstance(for_list, list) or isinstance(for_list, str)
recipe.control_directives.append(
ForDirective(directive.loop_variable, for_list)
)
elif isinstance(directive, scoml_classes["IfSetDirective"]):
var = directive.variable
assert isinstance(var, str)
recipe.control_directives.append(IfSetDirective(var))
elif isinstance(directive, scoml_classes["IfCondDirective"]):
var = directive.variable
op = directive.operator
other_value = convert_textx_value(directive.other_value)
assert isinstance(var, str)
assert isinstance(op, str)
assert isinstance(other_value, str)
recipe.control_directives.append(IfCondDirective(var, op, other_value))
else: else:
raise ValueError(f"Unknown directive {directive}") raise ValueError(f"Unknown directive {directive}")
@ -279,9 +185,7 @@ def convert_textx_resource(txresource) -> Resource:
else: else:
sous = txresource.sous sous = txresource.sous
resource_id = convert_textx_value(txresource.primary) return Resource(txresource.type, txresource.primary, sous, extra_params)
assert isinstance(resource_id, str)
return Resource(txresource.type, resource_id, sous, extra_params)
def convert_textx_block(txblock, parent: Optional[MenuBlock]) -> MenuBlock: def convert_textx_block(txblock, parent: Optional[MenuBlock]) -> MenuBlock:
@ -298,23 +202,12 @@ def convert_textx_block(txblock, parent: Optional[MenuBlock]) -> MenuBlock:
elif isinstance(directive, scoml_classes["SousDirective"]): elif isinstance(directive, scoml_classes["SousDirective"]):
block.sous_directive = directive.sous block.sous_directive = directive.sous
elif isinstance(directive, scoml_classes["ForDirective"]): elif isinstance(directive, scoml_classes["ForDirective"]):
for_list = directive.collection or convert_textx_value(directive.list) block.for_directives.append(
assert isinstance(for_list, list) or isinstance(for_list, str) ForDirective(
block.control_directives.append( directive.loop_variable,
ForDirective(directive.loop_variable, for_list) directive.collection or convert_textx_value(directive.list),
)
) )
elif isinstance(directive, scoml_classes["IfSetDirective"]):
var = directive.variable
assert isinstance(var, str)
block.control_directives.append(IfSetDirective(var))
elif isinstance(directive, scoml_classes["IfCondDirective"]):
var = directive.variable
op = directive.operator
other_value = convert_textx_value(directive.other_value)
assert isinstance(var, str)
assert isinstance(op, str)
assert isinstance(other_value, str)
block.control_directives.append(IfCondDirective(var, op, other_value))
elif isinstance(directive, scoml_classes["ImportDirective"]): elif isinstance(directive, scoml_classes["ImportDirective"]):
block.import_directives.append(directive.importee) block.import_directives.append(directive.importee)
elif isinstance(directive, scoml_classes["ResourceEdgeDirective"]): elif isinstance(directive, scoml_classes["ResourceEdgeDirective"]):
@ -430,7 +323,7 @@ class MenuLoader:
a: Union[MenuBlock, MenuRecipe] = referrer a: Union[MenuBlock, MenuRecipe] = referrer
strip = 0 strip = 0
while a != first_common_ancestor: while a != first_common_ancestor:
strip += len(a.control_directives) strip += len(a.for_directives)
parent = a.parent parent = a.parent
assert parent is not None assert parent is not None
a = parent a = parent
@ -438,7 +331,7 @@ class MenuLoader:
a = menu_recipe a = menu_recipe
extra = 0 extra = 0
while a != first_common_ancestor: while a != first_common_ancestor:
extra += len(a.control_directives) extra += len(a.for_directives)
parent = a.parent parent = a.parent
assert parent is not None assert parent is not None
a = parent a = parent
@ -460,38 +353,23 @@ class MenuLoader:
return result return result
def get_all_menublock_recipes(self, block: MenuBlock) -> Iterable[MenuRecipe]:
for child in block.contents:
if isinstance(child, MenuRecipe):
yield child
elif isinstance(child, MenuBlock):
yield from self.get_all_menublock_recipes(child)
else:
raise RuntimeError(f"Unknown child {child!r}")
def dagify_recipe( def dagify_recipe(
self, self,
recipe: MenuRecipe, recipe: MenuRecipe,
hierarchical_source: str, hierarchical_source: str,
fors: Tuple[ForDirective, ...], fors: Tuple[ForDirective, ...],
applicable_souss: Iterable[str], applicable_souss: Iterable[str],
sous_mask: Optional[Set[str]],
applicable_user: Optional[str], applicable_user: Optional[str],
): ):
recipe_class = self._head.recipe_loader.get_class(recipe.kind) recipe_class = self._head.recipe_loader.get_class(recipe.kind)
if recipe_class is None:
raise ValueError(f"No recipe class found for {recipe.kind!r}")
fors = fors + tuple(recipe.control_directives) fors = fors + tuple(recipe.for_directives)
if recipe.user_directive: if recipe.user_directive:
applicable_user = recipe.user_directive applicable_user = recipe.user_directive
if recipe.sous_directive: if recipe.sous_directive:
applicable_souss = self._head.get_souss_for_hostspec(recipe.sous_directive) applicable_souss = self._head.get_souss_for_hostspec(recipe.sous_directive)
if sous_mask:
applicable_souss = set(applicable_souss)
applicable_souss.intersection_update(sous_mask)
for sous in applicable_souss: for sous in applicable_souss:
if not applicable_user: if not applicable_user:
@ -499,19 +377,16 @@ class MenuLoader:
assert applicable_user is not None assert applicable_user is not None
sous_vars = self._head.variables[sous] sous_vars = self._head.variables[sous]
for context_vars, for_indices in self._control_apply( for _vars, for_indices in self._for_apply(fors, sous_vars, tuple()):
fors, sous_vars, tuple()
):
context = RecipeContext( context = RecipeContext(
sous=sous, sous=sous,
user=context_vars.eval(applicable_user), user=applicable_user,
slug=recipe.id, slug=recipe.id,
hierarchical_source=hierarchical_source, # XXX hierarchical_source=hierarchical_source, # XXX
human=recipe.human, human=recipe.human,
variables=context_vars,
) )
try: try:
args = context_vars.substitute_in_dict_copy(recipe.arguments) args = _vars.substitute_in_dict_copy(recipe.arguments)
except KeyError as ke: except KeyError as ke:
raise KeyError( raise KeyError(
f"When substituting for {hierarchical_source} / {recipe}" f"When substituting for {hierarchical_source} / {recipe}"
@ -526,19 +401,15 @@ class MenuLoader:
hierarchical_source: str, hierarchical_source: str,
fors: Tuple[ForDirective, ...], fors: Tuple[ForDirective, ...],
applicable_souss: Iterable[str], applicable_souss: Iterable[str],
sous_mask: Optional[Set[str]],
applicable_user: Optional[str], applicable_user: Optional[str],
): ):
fors = fors + tuple(block.control_directives) fors = fors + tuple(block.for_directives)
if block.user_directive: if block.user_directive:
applicable_user = block.user_directive applicable_user = block.user_directive
if block.sous_directive: if block.sous_directive:
applicable_souss = self._head.get_souss_for_hostspec(block.sous_directive) applicable_souss = self._head.get_souss_for_hostspec(block.sous_directive)
if sous_mask:
applicable_souss = set(applicable_souss)
applicable_souss.intersection_update(sous_mask)
for content in block.contents: for content in block.contents:
if isinstance(content, MenuBlock): if isinstance(content, MenuBlock):
@ -548,7 +419,6 @@ class MenuLoader:
f"{hierarchical_source}.{block_name}", f"{hierarchical_source}.{block_name}",
fors, fors,
applicable_souss, applicable_souss,
sous_mask,
applicable_user, applicable_user,
) )
elif isinstance(content, MenuRecipe): elif isinstance(content, MenuRecipe):
@ -557,56 +427,38 @@ class MenuLoader:
hierarchical_source, hierarchical_source,
fors, fors,
applicable_souss, applicable_souss,
sous_mask,
applicable_user, applicable_user,
) )
else: else:
raise ValueError(f"{content}?") raise ValueError(f"{content}?")
def substitute_vars_in_resource(
self, vars: Variables, resource: Resource
) -> Resource:
evalled_id = vars.eval(resource.id)
return attr.evolve(resource, id=evalled_id)
def postdagify_recipe( def postdagify_recipe(
self, self,
recipe: MenuRecipe, recipe: MenuRecipe,
fors: Tuple[ForDirective, ...], fors: Tuple[ForDirective, ...],
applicable_souss: Iterable[str], applicable_souss: Iterable[str],
sous_mask: Optional[Set[str]], ):
) -> None:
# TODO(feature): add edges # TODO(feature): add edges
# add fors # add fors
fors = fors + tuple(recipe.control_directives) fors = fors + tuple(recipe.for_directives)
if recipe.sous_directive: if recipe.sous_directive:
applicable_souss = self._head.get_souss_for_hostspec(recipe.sous_directive) applicable_souss = self._head.get_souss_for_hostspec(recipe.sous_directive)
if sous_mask:
applicable_souss = set(applicable_souss)
applicable_souss.intersection_update(sous_mask)
for sous in applicable_souss: for sous in applicable_souss:
sous_vars = self._head.variables[sous] sous_vars = self._head.variables[sous]
for context_vars, for_indices in self._control_apply( for _vars, for_indices in self._for_apply(fors, sous_vars, tuple()):
fors, sous_vars, tuple()
):
instance = self._recipes[recipe][(sous, for_indices)] # noqa instance = self._recipes[recipe][(sous, for_indices)] # noqa
for recipe_edge in recipe.recipe_edges: for recipe_edge in recipe.recipe_edges:
target = self.resolve_ref(recipe, recipe_edge.recipe_id) target = self.resolve_ref(recipe, recipe_edge.recipe_id)
if isinstance(target, MenuBlock): if isinstance(target, MenuBlock):
# Get all recipes and apply the edge to them. # TODO(feature)
for target_recipe in self.get_all_menublock_recipes(target): raise NotImplementedError(
for target_instance in self.get_related_instances( "@after/@before on block is not yet here sadly"
sous, for_indices, recipe, target_recipe )
):
if recipe_edge.kind == "after":
self._dag.add_ordering(target_instance, instance)
elif recipe_edge.kind == "before":
self._dag.add_ordering(instance, target_instance)
elif isinstance(target, MenuRecipe): elif isinstance(target, MenuRecipe):
for target_instance in self.get_related_instances( for target_instance in self.get_related_instances(
sous, for_indices, recipe, target sous, for_indices, recipe, target
@ -617,9 +469,7 @@ class MenuLoader:
self._dag.add_ordering(instance, target_instance) self._dag.add_ordering(instance, target_instance)
for resource_edge in recipe.resource_edges: for resource_edge in recipe.resource_edges:
resource = self.substitute_vars_in_resource( resource = resource_edge.resource
context_vars, resource_edge.resource
)
if resource.sous == "(self)": if resource.sous == "(self)":
resource = attr.evolve(resource, sous=sous) resource = attr.evolve(resource, sous=sous)
@ -631,96 +481,62 @@ class MenuLoader:
elif resource_edge.kind == "provides": elif resource_edge.kind == "provides":
self._dag.provides(instance, resource) self._dag.provides(instance, resource)
for listen_edge in recipe.listen_edges: # XXX apply specific edges here including those from parent
if isinstance(listen_edge.recipe_or_resource, Resource):
# TODO(design): is it right for this to NEED it rather
# than WANT it?
resource = listen_edge.recipe_or_resource
if resource.sous == "(self)":
resource = attr.evolve(resource, sous=sous)
self._dag.needs(instance, resource)
self._dag.watches(
instance,
resource,
listen_edge.kind == "only when",
)
elif isinstance(listen_edge.recipe_or_resource, str):
target = self.resolve_ref(
recipe, listen_edge.recipe_or_resource
)
if isinstance(target, MenuRecipe):
for target_instance in self.get_related_instances(
sous, for_indices, recipe, target
):
self._dag.add_ordering(target_instance, instance)
self._dag.watches(
instance,
target_instance,
listen_edge.kind == "only when",
)
else:
raise RuntimeError(f"not supported on target: {target!r}")
# XXX apply edges from parent
def postdagify_block( def postdagify_block(
self, self,
block: MenuBlock, block: MenuBlock,
fors: Tuple[ForDirective, ...], fors: Tuple[ForDirective, ...],
applicable_souss: Iterable[str], applicable_souss: Iterable[str],
sous_mask: Optional[Set[str]],
): ):
# XXX pass down specific edges here # XXX pass down specific edges here
# TODO(feature): add edges # TODO(feature): add edges
fors = fors + tuple(block.control_directives) fors = fors + tuple(block.for_directives)
if block.sous_directive: if block.sous_directive:
applicable_souss = self._head.get_souss_for_hostspec(block.sous_directive) applicable_souss = self._head.get_souss_for_hostspec(block.sous_directive)
if sous_mask:
applicable_souss = set(applicable_souss)
applicable_souss.intersection_update(sous_mask)
for content in block.contents: for content in block.contents:
if isinstance(content, MenuBlock): if isinstance(content, MenuBlock):
self.postdagify_block(content, fors, applicable_souss, sous_mask) self.postdagify_block(content, fors, applicable_souss)
elif isinstance(content, MenuRecipe): elif isinstance(content, MenuRecipe):
self.postdagify_recipe(content, fors, applicable_souss, sous_mask) self.postdagify_recipe(content, fors, applicable_souss)
else: else:
raise ValueError(f"{content}?") raise ValueError(f"{content}?")
def dagify_all(self, sous_subset: Optional[Set[str]]): def dagify_all(self):
for name, unit in self._units.items(): for name, unit in self._units.items():
self.dagify_block( self.dagify_block(
unit, unit, name, tuple(), self._head.get_souss_for_hostspec("all"), None
name,
tuple(),
self._head.get_souss_for_hostspec("all"),
sous_subset,
None,
) )
for _name, unit in self._units.items(): for _name, unit in self._units.items():
self.postdagify_block( self.postdagify_block(
unit, tuple(), self._head.get_souss_for_hostspec("all"), sous_subset unit, tuple(), self._head.get_souss_for_hostspec("all")
) )
def _control_apply( def _for_apply(
self, self, fors: Tuple[ForDirective, ...], vars: "Variables", accum: Tuple[int, ...]
controls: Tuple[ControlDirective, ...],
vars: "Variables",
accum: Tuple[int, ...],
) -> Iterable[Tuple["Variables", Tuple[int, ...]]]: ) -> Iterable[Tuple["Variables", Tuple[int, ...]]]:
if not controls: if not fors:
yield vars, accum yield vars, accum
return return
head = controls[0] head = fors[0]
tail = controls[1:] tail = fors[1:]
for idx, new_vars in enumerate(head.iter_over(vars)): to_iter = head.collection
yield from self._control_apply(tail, new_vars, accum + (idx,)) if isinstance(to_iter, str):
to_iter = vars.get_dotted(to_iter)
if not isinstance(to_iter, list):
raise ValueError(f"to_iter = {to_iter!r} not a list")
for idx, item in enumerate(to_iter):
new_vars = Variables(vars)
new_vars.set_dotted(head.loop_variable, item)
yield from self._for_apply(tail, new_vars, accum + (idx,))
def load_menus_in_dir(self) -> RecipeDag: def load_menus_in_dir(self) -> RecipeDag:
dag = RecipeDag() dag = RecipeDag()
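
A large part of this file deals with expanding nested control directives (@for, plus the conditionals on develop) into concrete recipe instances keyed by loop indices. A stripped-down sketch of that expansion, with Variables reduced to a plain dict (illustrative only):

    from typing import Any, Dict, Iterable, List, Tuple

    ForSpec = Tuple[str, List[Any]]  # (loop variable, collection)

    def expand(fors: Tuple[ForSpec, ...],
               variables: Dict[str, Any],
               indices: Tuple[int, ...] = ()) -> Iterable[Tuple[Dict[str, Any], Tuple[int, ...]]]:
        if not fors:
            yield variables, indices
            return
        (loop_var, collection), rest = fors[0], fors[1:]
        for idx, item in enumerate(collection):
            new_vars = dict(variables)
            new_vars[loop_var] = item
            yield from expand(rest, new_vars, indices + (idx,))

    combos = list(expand((("site", ["blog", "wiki"]), ("env", ["prod"])), {}))
    assert combos == [
        ({"site": "blog", "env": "prod"}, (0, 0)),
        ({"site": "wiki", "env": "prod"}, (1, 0)),
    ]
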

View File

@ -20,8 +20,6 @@ from typing import Any, Dict, Optional
import attr import attr
from scone.head.variables import Variables
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from scone.head.head import Head from scone.head.head import Head
from scone.head.kitchen import Kitchen, Preparation from scone.head.kitchen import Kitchen, Preparation
@ -45,8 +43,6 @@ class RecipeContext:
human: str human: str
variables: Variables
class Recipe: class Recipe:
def __init__( def __init__(
@ -81,20 +77,3 @@ class Recipe:
f"{cls.__name__} {self.recipe_context.human}" f"{cls.__name__} {self.recipe_context.human}"
f" on {self.recipe_context.sous} ({self.arguments})" f" on {self.recipe_context.sous} ({self.arguments})"
) )
class HeadRecipe(Recipe):
def __init__(
self, recipe_context: RecipeContext, args: Dict[str, Any], head: "Head"
):
super().__init__(recipe_context, args, head)
if recipe_context.sous != "head":
myname = self.__class__._NAME
raise ValueError(
f"[[{myname}]] is a head recipe, so should be run with @sous = head"
)
def prepare(self, preparation: "Preparation", head: "Head") -> None:
# Don't add a requirement for an os-user, since it's a head recipe
# that doesn't have one.
pass

View File

@ -14,7 +14,7 @@
# #
# You should have received a copy of the GNU General Public License # You should have received a copy of the GNU General Public License
# along with Scone. If not, see <https://www.gnu.org/licenses/>. # along with Scone. If not, see <https://www.gnu.org/licenses/>.
import asyncio
import logging import logging
from typing import Optional, Tuple from typing import Optional, Tuple
@ -34,21 +34,8 @@ class AsyncSSHChanPro(ChanPro):
async def close(self) -> None: async def close(self) -> None:
await super(AsyncSSHChanPro, self).close() await super(AsyncSSHChanPro, self).close()
self._process.stdin.write_eof() await self._process.close()
self._process.stdin.close() await self._connection.close()
for _ in range(50):
await asyncio.sleep(0.1)
if self._process.at_eof(None):
logger.debug("at_eof")
break
else:
logger.debug("not at_eof quickly enough...")
self._process.close()
await self._process.wait_closed()
self._connection.close()
await self._connection.wait_closed()
async def open_ssh_sous( async def open_ssh_sous(
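
The develop side of close() writes EOF, then polls briefly for the remote process to finish before closing forcefully. A generic sketch of that bounded-wait pattern, with the SSH process replaced by a stub (assumed helper names; not the asyncssh API):

    import asyncio

    async def close_gracefully(is_finished, force_close, *,
                               attempts: int = 50, interval: float = 0.1) -> bool:
        for _ in range(attempts):
            await asyncio.sleep(interval)
            if is_finished():
                return True      # clean shutdown observed
        force_close()            # give up and terminate forcefully
        return False

    async def demo() -> None:
        state = {"finished": False}

        async def finish_soon():
            await asyncio.sleep(0.25)
            state["finished"] = True

        task = asyncio.create_task(finish_soon())
        clean = await close_gracefully(lambda: state["finished"], lambda: None)
        await task
        assert clean

    asyncio.run(demo())
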

View File

@ -113,15 +113,9 @@ class Variables:
keys = name.split(".") keys = name.split(".")
try: try:
for k in keys: for k in keys:
if isinstance(current, dict): if not isinstance(current, dict):
raise ValueError(f"non-dictionary encountered when getting {name}")
current = current[k] current = current[k]
elif isinstance(current, list):
current = current[int(k)]
else:
raise ValueError(
f"non-dictionary, non-list encountered when getting {name}"
)
return current return current
except KeyError: except KeyError:
if self._delegate: if self._delegate:
@ -184,15 +178,6 @@ class Variables:
key, expr = incoming.popitem() key, expr = incoming.popitem()
if isinstance(expr, str): if isinstance(expr, str):
value = self._eval_with_incoming(expr, incoming) value = self._eval_with_incoming(expr, incoming)
# elif isinstance(expr, list): not what I thought
# # TODO this is still not 100% coverage of variables but good enough
# # for now?
# value = []
# for item in expr:
# if isinstance(item, str):
# value.append(self._eval_with_incoming(item, incoming))
# else:
# value.append(item)
else: else:
value = expr value = expr
self.set_dotted(key, value) self.set_dotted(key, value)
@ -209,12 +194,6 @@ class Variables:
self.substitute_inplace_in_dict(v) self.substitute_inplace_in_dict(v)
elif isinstance(v, str): elif isinstance(v, str):
dictionary[k] = self.eval(v) dictionary[k] = self.eval(v)
elif isinstance(v, list):
for idx, item in enumerate(v):
if isinstance(item, str):
v[idx] = self.eval(item)
elif isinstance(item, dict):
self.substitute_inplace_in_dict(item)
def substitute_in_dict_copy(self, dictionary: Dict[str, Any]): def substitute_in_dict_copy(self, dictionary: Dict[str, Any]):
new_dict = deepcopy(dictionary) new_dict = deepcopy(dictionary)
@ -222,12 +201,7 @@ class Variables:
return new_dict return new_dict
def toplevel(self): def toplevel(self):
if self._delegate: return self._vars
result = self._delegate.toplevel().copy()
else:
result = {}
result.update(self._vars)
return result
def keys(self) -> Set[str]: def keys(self) -> Set[str]:
keys = set(self._vars.keys()) keys = set(self._vars.keys())
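
The develop side of get_dotted also indexes into lists when a path segment is numeric. A small sketch of that behaviour in isolation (a plain function rather than the Variables class):

    from typing import Any

    def get_dotted(root: Any, name: str) -> Any:
        current = root
        for key in name.split("."):
            if isinstance(current, dict):
                current = current[key]
            elif isinstance(current, list):
                current = current[int(key)]
            else:
                raise ValueError(f"non-dictionary, non-list encountered when getting {name}")
        return current

    data = {"dns": {"records": [{"name": "www"}, {"name": "mail"}]}}
    assert get_dotted(data, "dns.records.1.name") == "mail"
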

View File

@ -20,7 +20,6 @@ import logging
import os import os
import pwd import pwd
import sys import sys
from itertools import filterfalse
from pathlib import Path from pathlib import Path
from typing import List, cast from typing import List, cast
@ -77,15 +76,10 @@ async def main(args: List[str]):
logger.info("Worktop dir is: %s", worktop.dir) logger.info("Worktop dir is: %s", worktop.dir)
awaitables = []
while True: while True:
try: try:
logger.debug("READ")
message = await root.recv() message = await root.recv()
logger.debug("RECV")
except EOFError: except EOFError:
logger.debug("RECV-EOF")
break break
if "nc" in message: if "nc" in message:
# start a new command channel # start a new command channel
@ -100,9 +94,7 @@ async def main(args: List[str]):
logger.debug("going to sched task with %r", utensil) logger.debug("going to sched task with %r", utensil)
awaitables.append(
asyncio.create_task(run_utensil(utensil, channel, worktop)) asyncio.create_task(run_utensil(utensil, channel, worktop))
)
elif "lost" in message: elif "lost" in message:
# for a then-non-existent channel, but probably just waiting on us # for a then-non-existent channel, but probably just waiting on us
# retry without a default route. # retry without a default route.
@ -110,18 +102,6 @@ async def main(args: List[str]):
else: else:
raise RuntimeError(f"Unknown ch0 message {message}") raise RuntimeError(f"Unknown ch0 message {message}")
for done_aw in filter(lambda aw: aw.done(), awaitables):
await done_aw
awaitables = list(filterfalse(lambda aw: aw.done(), awaitables))
logger.debug("EOF. waiting for awaitables")
for aw in awaitables:
await aw
logger.debug("waiting done!")
async def run_utensil(utensil: Utensil, channel: Channel, worktop: Worktop): async def run_utensil(utensil: Utensil, channel: Channel, worktop: Worktop):
try: try:
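
The develop side keeps every spawned utensil task in a list, reaps the ones that have already finished on each pass, and awaits whatever is left once the head disconnects. A condensed sketch of that bookkeeping with stub jobs (illustrative only):

    import asyncio
    from itertools import filterfalse

    async def job(n: int) -> int:
        await asyncio.sleep(0.01 * n)   # stand-in for running a utensil
        return n

    async def main() -> None:
        awaitables: list = []
        for n in range(5):
            awaitables.append(asyncio.create_task(job(n)))
            # reap anything that already finished so the list stays small
            for done in filter(lambda aw: aw.done(), awaitables):
                await done
            awaitables = list(filterfalse(lambda aw: aw.done(), awaitables))
        # "EOF": wait for whatever is still running before exiting
        for aw in awaitables:
            await aw

    asyncio.run(main())
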

setup.py (120 changes)
View File

@ -12,38 +12,36 @@ from shutil import rmtree
from setuptools import find_packages, setup, Command from setuptools import find_packages, setup, Command
# Package meta-data. # Package meta-data.
NAME = "scone" NAME = 'scone'
DESCRIPTION = "Simple CONfiguration Engine" DESCRIPTION = 'Simple CONfiguration Engine'
URL = "https://librepush.net/project/scone" URL = 'https://librepush.net/project/scone'
EMAIL = "rei@librepush.net" EMAIL = 'rei@librepush.net'
AUTHOR = "Olivier 'reivilibre'" AUTHOR = 'Olivier \'reivilibre\''
REQUIRES_PYTHON = ">=3.7.0" REQUIRES_PYTHON = '>=3.7.0'
VERSION = "0.2.0" VERSION = '0.1.0'
# What packages are required for this module to be executed? # What packages are required for this module to be executed?
REQUIRED = [ REQUIRED = [
"cbor2~=5.4.0", "cbor2~=5.1.1",
"setuptools~=49.1.2", "setuptools~=49.1.2",
"toml~=0.10.2", "toml~=0.10.1",
"attrs~=21.2.0", "attrs~=19.3.0",
"cattrs>=1.7.1", "cattrs~=1.0.0",
"canonicaljson>=1.4.0", "canonicaljson~=1.2.0",
"immutabledict>=2.0.0", "immutabledict==1.0.0"
] ]
EX_SOUS_BASE = [] EX_SOUS_BASE = []
EX_SOUS_PG = ["asyncpg"] EX_SOUS_PG = ["asyncpg"]
EX_SOUS_MYSQL = ["mysql-connector-python"]
EX_SOUS_DOCKER = ["docker"]
EX_SOUS_ALL = EX_SOUS_BASE + EX_SOUS_PG + EX_SOUS_MYSQL + EX_SOUS_DOCKER EX_SOUS_ALL = EX_SOUS_BASE + EX_SOUS_PG
EX_DEV_MYPY = [
"types-toml",
"types-requests" # What packages are optional?
] EXTRAS = {
EX_HEAD = [ "head": [
"SecretStorage~=3.1.2", "SecretStorage~=3.1.2",
"asyncssh[libnacl]~=2.2.1", "asyncssh[libnacl]~=2.2.1",
"toposort~=1.5", "toposort~=1.5",
@ -51,28 +49,13 @@ EX_HEAD = [
"aiosqlite~=0.15.0", "aiosqlite~=0.15.0",
"requests", "requests",
"Jinja2", "Jinja2",
"typeguard", "typeguard"
"textx", ],
]
EX_HEAD_HEDNS = [
"hurricanedns~=1.0.2",
]
EX_HEAD_ALL = EX_HEAD + EX_HEAD_HEDNS
EX_DEV = EX_SOUS_ALL + EX_HEAD + EX_DEV_MYPY
# What packages are optional?
EXTRAS = {
"head-core": EX_HEAD,
"head": EX_HEAD_ALL,
"head-hedns": EX_HEAD_HEDNS,
"sous": EX_SOUS_ALL, "sous": EX_SOUS_ALL,
"sous-core": EX_SOUS_BASE, "sous-core": EX_SOUS_BASE,
"sous-pg": EX_SOUS_PG, "sous-pg": EX_SOUS_PG,
"sous-mysql": EX_SOUS_MYSQL,
"sous-docker": EX_SOUS_DOCKER, "docker": ["docker"] # TODO do this more properly if we can...
"mypy": EX_DEV_MYPY,
"dev": EX_DEV
} }
# The rest you shouldn't have to touch too much :) # The rest you shouldn't have to touch too much :)
@ -85,8 +68,8 @@ here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description. # Import the README and use it as the long-description.
# Note: this will only work if 'README.md' is present in your MANIFEST.in file! # Note: this will only work if 'README.md' is present in your MANIFEST.in file!
try: try:
with io.open(os.path.join(here, "README.md"), encoding="utf-8") as f: with io.open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = "\n" + f.read() long_description = '\n' + f.read()
except FileNotFoundError: except FileNotFoundError:
long_description = DESCRIPTION long_description = DESCRIPTION
@ -94,22 +77,22 @@ except FileNotFoundError:
about = {} about = {}
if not VERSION: if not VERSION:
project_slug = NAME.lower().replace("-", "_").replace(" ", "_") project_slug = NAME.lower().replace("-", "_").replace(" ", "_")
with open(os.path.join(here, project_slug, "__version__.py")) as f: with open(os.path.join(here, project_slug, '__version__.py')) as f:
exec(f.read(), about) exec(f.read(), about)
else: else:
about["__version__"] = VERSION about['__version__'] = VERSION
class UploadCommand(Command): class UploadCommand(Command):
"""Support setup.py upload.""" """Support setup.py upload."""
description = "Build and publish the package." description = 'Build and publish the package.'
user_options = [] user_options = []
@staticmethod @staticmethod
def status(s): def status(s):
"""Prints things in bold.""" """Prints things in bold."""
print("\033[1m{0}\033[0m".format(s)) print('\033[1m{0}\033[0m'.format(s))
def initialize_options(self): def initialize_options(self):
pass pass
@ -119,20 +102,20 @@ class UploadCommand(Command):
def run(self): def run(self):
try: try:
self.status("Removing previous builds…") self.status('Removing previous builds…')
rmtree(os.path.join(here, "dist")) rmtree(os.path.join(here, 'dist'))
except OSError: except OSError:
pass pass
self.status("Building Source and Wheel (universal) distribution…") self.status('Building Source and Wheel (universal) distribution…')
os.system("{0} setup.py sdist bdist_wheel --universal".format(sys.executable)) os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
self.status("Uploading the package to PyPI via Twine…") self.status('Uploading the package to PyPI via Twine…')
os.system("twine upload dist/*") os.system('twine upload dist/*')
self.status("Pushing git tags…") self.status('Pushing git tags…')
os.system("git tag v{0}".format(about["__version__"])) os.system('git tag v{0}'.format(about['__version__']))
os.system("git push --tags") os.system('git push --tags')
sys.exit() sys.exit()
@ -140,10 +123,10 @@ class UploadCommand(Command):
# Where the magic happens: # Where the magic happens:
setup( setup(
name=NAME, name=NAME,
version=about["__version__"], version=about['__version__'],
description=DESCRIPTION, description=DESCRIPTION,
long_description=long_description, long_description=long_description,
long_description_content_type="text/markdown", long_description_content_type='text/markdown',
author=AUTHOR, author=AUTHOR,
author_email=EMAIL, author_email=EMAIL,
python_requires=REQUIRES_PYTHON, python_requires=REQUIRES_PYTHON,
@ -151,11 +134,12 @@ setup(
packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]), packages=find_packages(exclude=["tests", "*.tests", "*.tests.*", "tests.*"]),
# If your package is a single module, use this instead of 'packages': # If your package is a single module, use this instead of 'packages':
# py_modules=['mypackage'], # py_modules=['mypackage'],
entry_points={ entry_points={
"console_scripts": [ 'console_scripts': [
"scone=scone.head.cli:cli", 'scone=scone.head.cli:cli',
"scone-freezer=scone.head.cli.freezer:cli", 'scone-freezer=scone.head.cli.freezer:cli',
"michelin=scone.head.cli.michelin:cli", 'michelin=scone.head.cli.michelin:cli'
], ],
}, },
install_requires=REQUIRED, install_requires=REQUIRED,
@ -165,12 +149,14 @@ setup(
classifiers=[ classifiers=[
# Trove classifiers # Trove classifiers
# Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
"Programming Language :: Python", 'Programming Language :: Python',
"Programming Language :: Python :: 3", 'Programming Language :: Python :: 3',
"Programming Language :: Python :: 3.7", 'Programming Language :: Python :: 3.7',
"Programming Language :: Python :: Implementation :: CPython", 'Programming Language :: Python :: Implementation :: CPython',
"Programming Language :: Python :: Implementation :: PyPy", 'Programming Language :: Python :: Implementation :: PyPy'
], ],
# $ setup.py publish support. # $ setup.py publish support.
cmdclass={"upload": UploadCommand,}, cmdclass={
'upload': UploadCommand,
},
) )
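
For completeness, the extras declared above compose from smaller dependency lists, so an installation can opt into just the pieces it needs. A small sketch of how those groups relate (values copied from the develop side of the diff; the pip command is only an example):

    EX_SOUS_BASE: list = []
    EX_SOUS_PG = ["asyncpg"]
    EX_SOUS_MYSQL = ["mysql-connector-python"]
    EX_SOUS_DOCKER = ["docker"]
    EX_SOUS_ALL = EX_SOUS_BASE + EX_SOUS_PG + EX_SOUS_MYSQL + EX_SOUS_DOCKER

    EXTRAS = {
        "sous": EX_SOUS_ALL,
        "sous-core": EX_SOUS_BASE,
        "sous-pg": EX_SOUS_PG,
        "sous-mysql": EX_SOUS_MYSQL,
        "sous-docker": EX_SOUS_DOCKER,
    }

    # e.g. `pip install scone[sous-pg]` would pull in exactly:
    assert EXTRAS["sous-pg"] == ["asyncpg"]
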