diff --git a/hackathon/databroker/README.md b/hackathon/databroker/README.md
new file mode 100644
index 0000000..0c67336
--- /dev/null
+++ b/hackathon/databroker/README.md
@@ -0,0 +1,40 @@
+# Swift/t + DataBroker
+
+This set of files demonstrates how to access a DataBroker from swift.
+
+## Environment
+
+To run this demo you need:
+
+ - a Python 3.6 with requirements.txt installed
+ - a Python interpreter embedded in swift/t with swift-requirements.txt installed
+ - a running local mongodb server
+
+You can create a conda environment, which takes care of all the steps (except the
+last one):
+
+```
+conda env create -f conda-env.yml
+```
+
+The mongodb server can be installed as follows:
+
+```
+sudo apt update && sudo apt install mongodb-server
+```
+
+## Execution
+
+To run this:
+
+ - copy local.yml to `~/.config/databroker/local.yml` (or someplace
+   else on the search path, see
+   https://nsls-ii.github.io/databroker/configuration.html#search-path)
+ - then in 3 different sessions run the following:
+   - **1st**: `conda activate swift-t` and `bluesky-0MQ-proxy 2000 2001`
+   - **2nd**: `conda activate swift-t` and `swift-t -n -l read_databroker.swift`
+   - **3rd**: `conda activate swift-t` and `ipython`, and then in the IPython
+     session run `%run -i gen_data.py` to generate some synthetic data by
+     RunEngine, which will be published to the 0MQ proxy. In the 2nd
+     session you should see a bunch of events printed to the screen, meaning
+     the swift/t received the events from the proxy.
diff --git a/hackathon/databroker/Readme.md b/hackathon/databroker/Readme.md
deleted file mode 100644
index 4ef8a7d..0000000
--- a/hackathon/databroker/Readme.md
+++ /dev/null
@@ -1,26 +0,0 @@
-# Swift + DataBroker
-
-This set of files demonstrates how to access a DataBroker from swift.
-
-To run this demo you need
-
- - a python 3.6 with requirements.txt installed
- - a running local mongo server
- - a python interpreter embedded in swift/t with swift-requirements.txt installed
-
-To run this:
-
- - copy local.yml to `~/.config/databroker/local.yml` (or someplace
-   else on the search path, see
-   https://nsls-ii.github.io/databroker/configuration.html#search-path
-   )
- - run `gen_data.py` to load some synthetic data into the database
- - run `/your/install/path/swift-t -n 6 -l read_databroker.swift`
-
-You should see 100 rows of data like
-
-```
-[4] processed: '100 aadd1a95-b812-4038-9f8d-cf59539da413: {'det': 1.9287498479639178e-22, 'motor': 10.0, 'motor_setpoint': 10.0}'
-```
-
-be printed in slightly jumbled order.
diff --git a/hackathon/databroker/conda-env.yml b/hackathon/databroker/conda-env.yml
new file mode 100644
index 0000000..3df3823
--- /dev/null
+++ b/hackathon/databroker/conda-env.yml
@@ -0,0 +1,13 @@
+name: swift-t
+channels:
+  - lightsource2-tag
+  - defaults
+dependencies:
+  - bluesky
+  - databroker
+  - ipython
+  - numpy
+  - ophyd
+  - pymongo
+  - python=3.6
+  - swift-t
diff --git a/hackathon/databroker/gen_data.py b/hackathon/databroker/gen_data.py
index 255c88b..a4d39d4 100644
--- a/hackathon/databroker/gen_data.py
+++ b/hackathon/databroker/gen_data.py
@@ -2,11 +2,14 @@
 import bluesky.plans as bp
 from bluesky import RunEngine
 from bluesky.callbacks.zmq import Publisher
+from bluesky.callbacks.best_effort import BestEffortCallback
 from ophyd.sim import motor, det
 
 db = databroker.Broker.named('local')
+bec = BestEffortCallback()
 RE = RunEngine()
 RE.subscribe(db.insert)
+RE.subscribe(bec)
 
 pub = Publisher('localhost:2000', RE=RE)
 
diff --git a/hackathon/databroker/read_databroker.swift b/hackathon/databroker/read_databroker.swift
index 9d4026b..2befb32 100644
--- a/hackathon/databroker/read_databroker.swift
+++ b/hackathon/databroker/read_databroker.swift
@@ -21,12 +21,9 @@
 deserializer = pickle.loads
 address = ('localhost', 2001)
 
 _context = zmq.Context()
-print(f'=== _context: {_context}')
 _socket = _context.socket(zmq.SUB)
-print(f'=== _socket: {_socket}')
 url = "tcp://%s:%d" % address
-print(f'=== _url: {url}')
 _socket.connect(url)
 print('connect')
 
@@ -40,7 +37,7 @@ def _poll(_socket, deserializer, DocumentNames):
     hostname = hostname.decode()
     name = name.decode()
     doc = deserializer(doc)
-    yield (DocumentNames[name], doc)
+    yield (name, doc)
 
 doc_gen = _poll(_socket, deserializer, DocumentNames)
 ----,
@@ -68,15 +65,12 @@
 except StopIteration:
 { result = python_persist(
 ----
-import time
-time.sleep(1)
-ev = %s
-print(type(ev))
+name, doc = %s
 ----
 % inp
 ,
 ----
-f"{ev['seq_num']} {ev['uid']}: {ev['data']}"
+name
 ----
 );
 
@@ -87,7 +81,7 @@
 global const int MAX = 1000;
 
 process() {
   for (int i = 0, boolean stop = false;
-      i < MAX && !stop;
+      !stop;
       i = i+1, stop = end_of_data) {
     line = get_event();