Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Data Security work for CHX #29

Open
wants to merge 25 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
17be6b1
Added changes for RedisJSONDict, Timepix3 and Eiger1M
Jul 22, 2024
5eeb6b8
ENH: utility to get a list of users assigned to a proposal
genematx Jul 23, 2024
8e07b08
WIP: changing paths of ProsilicaWithTIFFV33 detectors
Jul 23, 2024
c4e8fa8
MNT: update CI/CD environments
genematx Oct 9, 2024
e2f3ad6
Added new paths for xray-eye and OAV devices
Oct 9, 2024
a244573
Fix bug with time package
Oct 9, 2024
e580cfd
MNT: removed references to persistent dict
Oct 10, 2024
a5d02c4
MNT: remove old monkeypatching ophyd code
Oct 10, 2024
9f273ee
Cleaned 00-base.py
Oct 10, 2024
ad0404e
Cleaned 20-area-detectors.py
Oct 10, 2024
bf8aa32
ENH: configure tiled clients
genematx Oct 10, 2024
39b8cb2
Modified paths for eiger detectors
Oct 10, 2024
42f7dec
Modified paths for eiger4m
Oct 10, 2024
8b04754
WIP: Added tiled client
Oct 10, 2024
a8e8e0b
Removed print from olog
Oct 10, 2024
bf4480b
Modified paths for optional startup scripts
Oct 11, 2024
1945d2e
Merge branch 'master' into data-security-implementation
jmaruland Oct 11, 2024
132e9a3
WIP: started replacing db with tiled_reading_client
Oct 11, 2024
4070791
ENH: use stage method to update file paths in detectors
genematx Jan 23, 2025
15857ca
Merge pull request #33 from genematx/data-security-implementation
genematx Jan 23, 2025
7d33866
ENH: update writing paths for timepix
Jan 23, 2025
25011b9
MNT: clean-up
Jan 23, 2025
3d03b98
FIX: filepath for timepix
Jan 23, 2025
2cc15f4
Added fixes for tiled compatibility
Jan 24, 2025
d213ec0
Use old detector names and define a mapping to the saving locations
Jan 27, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -74,3 +74,6 @@ zz-dama.py

# Jupyter Notebook Checkpoint
.ipynb_checkpoints/

# MacOS Attributes File
*.DS_Store
14 changes: 4 additions & 10 deletions azure-pipelines.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,18 +7,12 @@ resources:
endpoint: github

jobs:
- template: collection-2021-1.2.yml@templates
- template: 2024-2.3-py310.yml@templates
parameters:
beamline_acronym: CHX
- template: nsls2-collection-2021-2.2.yml@templates
- template: 2024-2.3-py311.yml@templates
parameters:
beamline_acronym: CHX
- template: nsls2-collection-2021-2.2-py39.yml@templates
- template: 2024-2.3-py312-tiled.yml@templates
parameters:
beamline_acronym: CHX
- template: nsls2-collection-2021-3.0-py37.yml@templates
parameters:
beamline_acronym: CHX
- template: nsls2-collection-2021-3.0-py39.yml@templates
parameters:
beamline_acronym: CHX
beamline_acronym: CHX
16 changes: 11 additions & 5 deletions optional_startup/21-ad-pil800k.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,9 +122,9 @@ class Pilatus800V33(SingleTriggerV33, PilatusDetector):
tiff = Cpt(
TIFFPluginWithFileStore,
suffix="TIFF1:",
read_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
write_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
root="/nsls2/data/chx/legacy/data",
read_path_template="",
write_path_template="",
root="",
)
# root='/')

Expand All @@ -151,9 +151,15 @@ class Pilatus800V33(PilatusV33):
tiff = Cpt(
TIFFPluginWithFileStore,
suffix="TIFF1:",
write_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
root="/nsls2/data/chx/legacy/data",
write_path_template="",
root="",
)

def stage(self, *args, **kwargs):
self.file.write_path_template = assets_path() + f'{self.name}/%Y/%m/%d/'
self.file.reg_root = assets_path() + f'{self.name}'
return super().stage(*args, **kwargs)

Pilatus800_on = True
if Pilatus800_on == True:
pilatus800 = Pilatus800V33("XF:11IDB-ES{Det:P800k}", name="pilatus800")
Expand Down
16 changes: 11 additions & 5 deletions optional_startup/21-pilatus_800ks.py
Original file line number Diff line number Diff line change
Expand Up @@ -122,9 +122,9 @@ class Pilatus800V33(SingleTriggerV33, PilatusDetector):
tiff = Cpt(
TIFFPluginWithFileStore,
suffix="TIFF1:",
read_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
write_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
root="/nsls2/data/chx/legacy/data",
read_path_template="",
write_path_template="",
root="",
)
# root='/')

Expand All @@ -151,9 +151,15 @@ class Pilatus800V33(PilatusV33):
tiff = Cpt(
TIFFPluginWithFileStore,
suffix="TIFF1:",
write_path_template="/nsls2/data/chx/legacy/data/%Y/%m/%d/",
root="/nsls2/data/chx/legacy/data",
write_path_template="",
root="",
)

def stage(self, *args, **kwargs):
self.file.write_path_template = assets_path() + f'{self.name}/%Y/%m/%d/'
self.file.reg_root = assets_path() + f'{self.name}'
return super().stage(*args, **kwargs)

Pilatus800_on = True
if Pilatus800_on == True:
pilatus800 = Pilatus800V33("XF:11IDB-ES{Det:P800k}", name="pilatus800")
Expand Down
36 changes: 32 additions & 4 deletions optional_startup/9999-tpx3.py
Original file line number Diff line number Diff line change
Expand Up @@ -68,9 +68,9 @@ def __init__(self, *args, **kwargs):

def stage(self):
# TODO also do the images

self._res_uid = res_uid = new_short_uid()
write_path_template = 'file:/nsls2/data/chx/legacy/data/%Y/%m/%d/'
write_path_template = "file:" + assets_path() + "timepix-1/%Y/%m/%d/"
self._write_path = write_path = datetime.now().strftime(write_path_template)
self.raw_filepath.set(write_path).wait()

Expand All @@ -85,9 +85,11 @@ def stage(self):
self.update_file_template()
# reset this back to 0!
self._n = 0

super().stage()



def update_file_template(self):
# The server generates files with in a trigger that are formatted as
# filepath/template{datetime only}_{j:d6}.tpx3
Expand Down Expand Up @@ -115,11 +117,28 @@ def unstage(self) -> List[object]:
return super().unstage()


class Tpx3HDF(Device):
    """HDF5-plugin shim for the Timepix3 detector.

    Uses the AreaDetector HDF5 plugin's directory-creation feature to avoid
    having to implement directory creation logic for the TPX firmware
    explicitly.
    """

    # Target file path on the IOC (with readback).
    hdf5_file_path = Cpt(EpicsSignalWithRBV, "FilePath", kind="omitted")
    # How many levels of missing parent directories the plugin may create.
    hdf5_create_directory = Cpt(EpicsSignalWithRBV, "CreateDirectory", kind="omitted")

    def stage(self):
        """Set the proposal-scoped write directory, then stage normally."""
        # Allow the plugin to create up to 4 missing directory levels.
        # FIX: wait for the put to complete so the setting is in effect
        # before the file path is written (matches the `.set(...).wait()`
        # style used elsewhere in this file).
        self.hdf5_create_directory.set(-4).wait()
        write_path_template = assets_path() + "timepix-1/%Y/%m/%d/"
        write_path = datetime.now().strftime(write_path_template)
        self.hdf5_file_path.put(write_path)
        # FIX: the original never called super().stage(), skipping ophyd's
        # staging bookkeeping; Device.stage() must also return the list of
        # staged devices per the ophyd contract.
        return super().stage()

class TimePixDetector(SingleTriggerV33, AreaDetector):
_default_configuration_attrs = None
_default_read_attrs = None

hdf_plugin = Cpt(Tpx3HDF, "HDF1:")

files = Cpt(Tpx3Files, "cam1:")



stats1 = Cpt(StatsPlugin_V34, "Stats1:")
stats2 = Cpt(StatsPlugin_V34, "Stats2:")
Expand All @@ -134,7 +153,14 @@ class TimePixDetector(SingleTriggerV33, AreaDetector):
ts1 = Cpt(TimeSeriesPlugin_V34, "Stats1:TS:")
ts2 = Cpt(TimeSeriesPlugin_V34, "Stats2:TS:")
ts3 = Cpt(TimeSeriesPlugin_V34, "Stats3:TS:")
ts4 = Cpt(TimeSeriesPlugin_V34, "Stats4:TS:")
ts4 = Cpt(TimeSeriesPlugin_V34, "Stats4:TS:")

# def stage(self):
# self.hdf5_create_directory.set(-4).wait()
# write_path_template = assets_path() + "timepix-1/%Y/%m/%d/"
# write_path = datetime.now().strftime(write_path_template)
# self.hdf5_file_path.set(write_path).wait()
# self.files.stage()

def trigger(self):
self.files.update_file_template()
Expand All @@ -159,13 +185,15 @@ def set_total_exposure(self, exp, *, max_exposure=10):
yield from bps.mv(self.cam.acquire_time, real_exp)
yield from self.set_num_images(num_frames)


# Instantiate the Timepix3 detector.
# NOTE(review): "TPX3-TEST:" looks like a development/test IOC prefix —
# confirm it is the intended prefix before deploying at the beamline.
tpx3 = TimePixDetector("TPX3-TEST:", name="tpx3")

# Promote the per-ROI statistics plugins so their totals appear in live
# tables and plots (total is hinted; the time-series total is normal).
for j in range(1, 5):
    stat = getattr(tpx3, f'stats{j}')
    stat.kind = 'normal'
    stat.total.kind = 'hinted'
    stat.ts_total.kind = 'normal'


# Route each stats plugin's input to its matching ROI plugin output port.
for j in [1, 2, 3, 4]:
    getattr(tpx3, f'stats{j}').nd_array_port.set(f'ROI{j}')
148 changes: 44 additions & 104 deletions startup/00-base.py
Original file line number Diff line number Diff line change
@@ -1,113 +1,53 @@
import nslsii
from bluesky import RunEngine
nslsii.configure_base(
get_ipython().user_ns,
'chx',
publish_documents_with_kafka=True
)
import redis
import os

# from tiled.client import from_profile
# from databroker.v1 import Broker
import time
from redis_json_dict import RedisJSONDict
from tiled.client import from_profile
from ophyd.signal import EpicsSignalBase
from databroker import Broker

# c = from_profile("chx-secure")
EpicsSignalBase.set_defaults(timeout=60, connection_timeout=60) # new style

# db = Broker(c)
# Configure a Tiled writing client
tiled_writing_client = from_profile("nsls2", api_key=os.environ["TILED_BLUESKY_WRITING_API_KEY_CHX"])["chx"]["raw"]

class TiledInserter:
    """Adapter with a Databroker-style ``insert`` interface that forwards
    (name, document) pairs to the Tiled writing client, retrying on failure.
    """

    def insert(self, name, doc):
        """Post one bluesky document to Tiled.

        Retries up to 20 times, sleeping 2 s between attempts. Raises the
        last exception if every attempt fails.
        """
        ATTEMPTS = 20
        error = None
        for attempt in range(ATTEMPTS):
            try:
                tiled_writing_client.post_document(name, doc)
            except Exception as exc:
                print("Document saving failure:", repr(exc))
                error = exc
                # FIX: only back off between attempts — the original also
                # slept 2 s after the final failure, delaying the raise.
                if attempt < ATTEMPTS - 1:
                    time.sleep(2)
            else:
                break
        else:
            # Out of attempts
            raise error

tiled_inserter = TiledInserter()

# The function below initializes RE and subscribes tiled_inserter to it
nslsii.configure_base(get_ipython().user_ns,
tiled_inserter,
publish_documents_with_kafka=True)

print("Initializing Tiled reading client...\nMake sure you check for duo push.")
tiled_reading_client = from_profile("nsls2", username=None, include_data_sources=True)["chx"]["raw"]

db = Broker(tiled_reading_client)

# set plot properties for 4k monitors
plt.rcParams['figure.dpi']=200

from pathlib import Path

import appdirs


# Compatibility shim: older bluesky releases do not ship PersistentDict,
# so fall back to an equivalent implementation built on zict + msgpack.
try:
    from bluesky.utils import PersistentDict
except ImportError:
    import msgpack
    import msgpack_numpy
    import zict

    class PersistentDict(zict.Func):
        # Disk-backed, dict-like key/value store. zict.Func wraps the
        # file-backed mapping with the (de)serialization functions below.
        def __init__(self, directory):
            self._directory = directory
            self._file = zict.File(directory)
            super().__init__(self._dump, self._load, self._file)

        @property
        def directory(self):
            # Read-only access to the backing directory path.
            return self._directory

        def __repr__(self):
            return f"<{self.__class__.__name__} {dict(self)!r}>"

        @staticmethod
        def _dump(obj):
            "Encode as msgpack using numpy-aware encoder."
            # See https://github.com/msgpack/msgpack-python#string-and-binary-type
            # for more on use_bin_type.
            return msgpack.packb(
                obj,
                default=msgpack_numpy.encode,
                use_bin_type=True)

        @staticmethod
        def _load(file):
            "Decode msgpack bytes, reconstructing numpy arrays."
            return msgpack.unpackb(
                file,
                object_hook=msgpack_numpy.decode,
                raw=False)

runengine_metadata_dir = appdirs.user_data_dir(appname="bluesky") / Path("runengine-metadata")

# PersistentDict will create the directory if it does not exist
RE.md = PersistentDict(runengine_metadata_dir)

# send ophyd debug log to the console
# import logging
#logging.getLogger('ophyd').setLevel('DEBUG')
#console_handler = logging.StreamHandler()
#console_handler.setLevel("DEBUG")
#logging.getLogger('ophyd').addHandler(console_handler)

###############################################################################
# TODO: remove this block once https://github.com/bluesky/ophyd/pull/959 is
# merged/released.
import time
from datetime import datetime
from ophyd.signal import EpicsSignalBase, EpicsSignal, DEFAULT_CONNECTION_TIMEOUT

def print_now():
    """Return the current local time as 'YYYY-MM-DD HH:MM:SS.ffffff'."""
    return datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')

def wait_for_connection_base(self, timeout=DEFAULT_CONNECTION_TIMEOUT):
    '''Wait for the underlying signals to initialize or connect'''
    # Monkeypatch replacement for EpicsSignalBase.wait_for_connection (see
    # the TODO above referencing ophyd PR #959): waits only on the read PV.
    if timeout is DEFAULT_CONNECTION_TIMEOUT:
        # Sentinel default: fall back to this signal's own connection_timeout.
        timeout = self.connection_timeout
    # print(f'{print_now()}: waiting for {self.name} to connect within {timeout:.4f} s...')
    # `start` is only used by the commented-out timing prints.
    start = time.time()
    try:
        self._ensure_connected(self._read_pv, timeout=timeout)
        # print(f'{print_now()}: waited for {self.name} to connect for {time.time() - start:.4f} s.')
    except TimeoutError:
        if self._destroyed:
            # NOTE(review): DestroyedError is not imported anywhere visible
            # in this file — this line would raise NameError if reached.
            # Confirm `from ophyd.signal import DestroyedError` exists.
            raise DestroyedError('Signal has been destroyed')
        raise

def wait_for_connection(self, timeout=DEFAULT_CONNECTION_TIMEOUT):
    '''Wait for the underlying signals to initialize or connect'''
    # Monkeypatch replacement for EpicsSignal.wait_for_connection: waits on
    # both the read PV and the write PV before returning.
    if timeout is DEFAULT_CONNECTION_TIMEOUT:
        # Sentinel default: fall back to this signal's own connection_timeout.
        timeout = self.connection_timeout
    # print(f'{print_now()}: waiting for {self.name} to connect within {timeout:.4f} s...')
    # `start` is only used by the commented-out timing prints.
    start = time.time()
    self._ensure_connected(self._read_pv, self._write_pv, timeout=timeout)
    # print(f'{print_now()}: waited for {self.name} to connect for {time.time() - start:.4f} s.')

EpicsSignalBase.wait_for_connection = wait_for_connection_base
EpicsSignal.wait_for_connection = wait_for_connection
###############################################################################

from ophyd.signal import EpicsSignalBase
# EpicsSignalBase.set_default_timeout(timeout=10, connection_timeout=10) # old style
EpicsSignalBase.set_defaults(timeout=60, connection_timeout=60) # new style
# Set the metadata dictionary
RE.md = RedisJSONDict(redis.Redis("info.chx.nsls2.bnl.gov"), prefix="")

# Setup the path to the secure assets folder for the current proposal
def assets_path():
    """Return the secure assets directory for the current proposal.

    The path is derived from the ``cycle`` and ``data_session`` entries of
    the RunEngine metadata dictionary and always ends with a trailing slash.
    """
    cycle = RE.md['cycle']
    data_session = RE.md['data_session']
    return f"/nsls2/data/chx/proposals/{cycle}/{data_session}/assets/"
3 changes: 3 additions & 0 deletions startup/01-chxsetup.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,3 +21,6 @@ def print_scan_ids(name, start_doc):
from chxtools import transfuncs as trans
from chxtools import bpm_stability as bpmst

import sys
sys.path.insert(0,'/nsls2/data2/chx/shared/CHX_Software/packages/pass_dict')
from pass_database import *
Loading