diff --git a/CDB/thijs_ConfigDb.json b/CDB/thijs_ConfigDb.json new file mode 100644 index 0000000000000000000000000000000000000000..839a4e6d475d3f0783fb8a2bbda12e2163d93eb0 --- /dev/null +++ b/CDB/thijs_ConfigDb.json @@ -0,0 +1,108 @@ +{ + "servers": { + "PCC": { + "1": { + "PCC": { + "LTS/PCC/1": { + "properties": { + "OPC_Server_Name": [ + "host.docker.internal" + ] + } + } + } + } + }, + "SDP": { + "1": { + "SDP": { + "LTS/SDP/1": { + "properties": { + "OPC_Server_Name": [ + "host.docker.internal" + ] + } + } + } + } + }, + "example_device": { + "1": { + "example_device": { + "LTS/example_device/1": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + } + }, + "properties": { + "OPC_Server_Name": [ + "host.docker.internal" + ], + "OPC_Server_Port": [ + "4842" + ], + "OPC_Time_Out": [ + "5.0" + ] + } + } + } + } + }, + "ini_device": { + "1": { + "ini_device": { + "LTS/ini_device/1": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + } + }, + "properties": { + "OPC_Server_Name": [ + "host.docker.internal" + ], + "OPC_Server_Port": [ + "4842" + ], + "OPC_Time_Out": [ + "5.0" + ] + } + } + } + } + }, + "SNMP": { + "1": { + "SNMP": { + "LTS/SNMP/1": { + "attribute_properties": { + "Ant_mask_RW": { + "archive_period": [ + "600000" + ] + } + }, + "properties": { + "SNMP_community": [ + "public" + ], + "SNMP_host": [ + "192.168.178.17" + ], + "SNMP_timeout": [ + "5.0" + ] + } + } + } + } + } + } +} diff --git a/PCC/LICENSE.txt b/deprecated/PCC/LICENSE.txt similarity index 100% rename from PCC/LICENSE.txt rename to deprecated/PCC/LICENSE.txt diff --git a/PCC/MANIFEST.in b/deprecated/PCC/MANIFEST.in similarity index 100% rename from PCC/MANIFEST.in rename to deprecated/PCC/MANIFEST.in diff --git a/PCC/NOTICE b/deprecated/PCC/NOTICE similarity index 100% rename from PCC/NOTICE rename to deprecated/PCC/NOTICE diff --git a/PCC/PCC/PCC.py b/deprecated/PCC/PCC/PCC.py similarity index 100% rename from PCC/PCC/PCC.py rename to deprecated/PCC/PCC/PCC.py diff --git a/PCC/PCC/__init__.py b/deprecated/PCC/PCC/__init__.py similarity index 100% rename from PCC/PCC/__init__.py rename to deprecated/PCC/PCC/__init__.py diff --git a/PCC/PCC/__main__.py b/deprecated/PCC/PCC/__main__.py similarity index 100% rename from PCC/PCC/__main__.py rename to deprecated/PCC/PCC/__main__.py diff --git a/PCC/PCC/opcua_connection.py b/deprecated/PCC/PCC/opcua_connection.py similarity index 100% rename from PCC/PCC/opcua_connection.py rename to deprecated/PCC/PCC/opcua_connection.py diff --git a/PCC/PCC/release.py b/deprecated/PCC/PCC/release.py similarity index 100% rename from PCC/PCC/release.py rename to deprecated/PCC/PCC/release.py diff --git a/PCC/PCC/wrappers.py b/deprecated/PCC/PCC/wrappers.py similarity index 100% rename from PCC/PCC/wrappers.py rename to deprecated/PCC/PCC/wrappers.py diff --git a/PCC/README.rst b/deprecated/PCC/README.rst similarity index 100% rename from PCC/README.rst rename to deprecated/PCC/README.rst diff --git a/PCC/requirements.txt b/deprecated/PCC/requirements.txt similarity index 100% rename from PCC/requirements.txt rename to deprecated/PCC/requirements.txt diff --git a/PCC/setup.py b/deprecated/PCC/setup.py similarity index 100% rename from PCC/setup.py rename to deprecated/PCC/setup.py diff --git a/PCC/test/PCC_test.py b/deprecated/PCC/test/PCC_test.py similarity index 100% rename from PCC/test/PCC_test.py rename to deprecated/PCC/test/PCC_test.py diff --git a/PCC/test/__init__.py b/deprecated/PCC/test/__init__.py 
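As a quick orientation aid for the new CDB/thijs_ConfigDb.json above: a minimal sketch (plain Python, standard library only; the servers -> instance -> class -> device nesting is taken directly from the JSON added in this diff) for listing every device instance and its properties could look like this:

import json

# Walk the nested servers/instance/class/device layout of the CDB file and
# print the properties block of every device instance it defines.
with open("CDB/thijs_ConfigDb.json") as f:
    cdb = json.load(f)

for server, instances in cdb["servers"].items():
    for instance, classes in instances.items():
        for device_class, devices in classes.items():
            for device, config in devices.items():
                print(device, config.get("properties", {}))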
similarity index 100% rename from PCC/test/__init__.py rename to deprecated/PCC/test/__init__.py diff --git a/PCC/test/test-PCC.py b/deprecated/PCC/test/test-PCC.py similarity index 100% rename from PCC/test/test-PCC.py rename to deprecated/PCC/test/test-PCC.py diff --git a/SDP/LICENSE.txt b/deprecated/SDP/LICENSE.txt similarity index 100% rename from SDP/LICENSE.txt rename to deprecated/SDP/LICENSE.txt diff --git a/SDP/MANIFEST.in b/deprecated/SDP/MANIFEST.in similarity index 100% rename from SDP/MANIFEST.in rename to deprecated/SDP/MANIFEST.in diff --git a/SDP/NOTICE b/deprecated/SDP/NOTICE similarity index 100% rename from SDP/NOTICE rename to deprecated/SDP/NOTICE diff --git a/SDP/README.rst b/deprecated/SDP/README.rst similarity index 100% rename from SDP/README.rst rename to deprecated/SDP/README.rst diff --git a/SDP/SDP/SDP.py b/deprecated/SDP/SDP/SDP.py similarity index 100% rename from SDP/SDP/SDP.py rename to deprecated/SDP/SDP/SDP.py diff --git a/SDP/SDP/__init__.py b/deprecated/SDP/SDP/__init__.py similarity index 100% rename from SDP/SDP/__init__.py rename to deprecated/SDP/SDP/__init__.py diff --git a/SDP/SDP/__main__.py b/deprecated/SDP/SDP/__main__.py similarity index 100% rename from SDP/SDP/__main__.py rename to deprecated/SDP/SDP/__main__.py diff --git a/SDP/SDP/opcua_connection.py b/deprecated/SDP/SDP/opcua_connection.py similarity index 100% rename from SDP/SDP/opcua_connection.py rename to deprecated/SDP/SDP/opcua_connection.py diff --git a/SDP/SDP/release.py b/deprecated/SDP/SDP/release.py similarity index 100% rename from SDP/SDP/release.py rename to deprecated/SDP/SDP/release.py diff --git a/SDP/SDP/wrappers.py b/deprecated/SDP/SDP/wrappers.py similarity index 100% rename from SDP/SDP/wrappers.py rename to deprecated/SDP/SDP/wrappers.py diff --git a/SDP/requirements.txt b/deprecated/SDP/requirements.txt similarity index 100% rename from SDP/requirements.txt rename to deprecated/SDP/requirements.txt diff --git a/SDP/setup.py b/deprecated/SDP/setup.py similarity index 100% rename from SDP/setup.py rename to deprecated/SDP/setup.py diff --git a/devices/APSCTL.py b/devices/APSCTL.py deleted file mode 100644 index e4c4dd38eaa9ab3aa18665093275749a54f3d94f..0000000000000000000000000000000000000000 --- a/devices/APSCTL.py +++ /dev/null @@ -1,187 +0,0 @@ -# -*- coding: utf-8 -*- -# -# This file is part of the SDP project -# -# -# -# Distributed under the terms of the APACHE license. -# See LICENSE.txt for more info. 
- -""" SDP Device Server for LOFAR2.0 - -""" - -# PyTango imports -from tango.server import run -from tango.server import device_property -from tango import AttrWriteType - -#attribute extention and hardware device imports -from src.attribute_wrapper import attribute_wrapper -from src.hardware_device import hardware_device -import numpy -# Additional import - -from clients.opcua_connection import OPCUAConnection - - -__all__ = ["APSCTL", "main"] - -class APSCTL(hardware_device): - """ - - **Properties:** - - - Device Property - OPC_Server_Name - - Type:'DevString' - OPC_Server_Port - - Type:'DevULong' - OPC_Time_Out - - Type:'DevDouble' - """ - - # ----------------- - # Device Properties - # ----------------- - - OPC_Server_Name = device_property( - dtype='DevString', - mandatory=True - ) - - OPC_Server_Port = device_property( - dtype='DevULong', - mandatory=True - ) - - OPC_Time_Out = device_property( - dtype='DevDouble', - mandatory=True - ) - - # ---------- - # Attributes - # ---------- - N_unb = 2 - N_fpga = 4 - N_ddr = 2 - N_qsfp = 6 - - - # Central CP per Uniboard - UNB2_Power_ON_OFF_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Power_ON_OFF_RW"], datatype=numpy.bool_, dims=(N_unb,), access=AttrWriteType.READ_WRITE) - UNB2_Front_Panel_LED_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Front_Panel_LED_RW"], datatype=numpy.uint8, dims=(N_unb,), access=AttrWriteType.READ_WRITE) - UNB2_Mask_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Mask_RW"], datatype=numpy.bool_, dims=(N_unb,), access=AttrWriteType.READ_WRITE) - # Central MP per Uniboard - UNB2_I2C_bus_OK_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_I2C_bus_OK_R"], datatype=numpy.bool_, dims=(N_unb,)) - UNB2_Front_Panel_LED_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_Front_Panel_LED_R"], datatype=numpy.uint8, dims=(N_unb,)) - UNB2_EEPROM_Serial_Number_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_EEPROM_Serial_Number_R"], datatype=numpy.str, dims=(N_unb,)) - UNB2_EEPROM_Unique_ID_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_EEPROM_Unique_ID_R"], datatype=numpy.uint32, dims=(N_unb,)) - UNB2_DC_DC_48V_12V_VIN_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VIN_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_DC_DC_48V_12V_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_DC_DC_48V_12V_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_DC_DC_48V_12V_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_DC_DC_48V_12V_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N01_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N01_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N01_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N01_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N23_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N23_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_QSFP_N23_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_QSFP_N23_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", 
"2:UNB2_POL_QSFP_N23_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_1V2_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_1V2_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_1V2_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_1V2_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_PHY_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_PHY_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_SWITCH_PHY_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_SWITCH_PHY_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_CLOCK_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_VOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_CLOCK_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_IOUT_R"], datatype=numpy.double, dims=(N_unb,)) - UNB2_POL_CLOCK_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_POL_CLOCK_TEMP_R"], datatype=numpy.double, dims=(N_unb,)) - - # monitor points per FPGA - UNB2_FPGA_DDR4_SLOT_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_DDR4_SLOT_PART_NUMBER_R"], datatype=numpy.str, dims=(N_unb * N_qsfp,N_fpga)) - UNB2_FPGA_QSFP_CAGE_0_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_0_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_1_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_1_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_2_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_2_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_3_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_3_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_4_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_4_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_5_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_5_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_0_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_0_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_1_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_1_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_2_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_2_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_3_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_3_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_4_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_4_LOS_R"], datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_QSFP_CAGE_5_LOS_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_QSFP_CAGE_5_LOS_R"], 
datatype=numpy.uint8, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_CORE_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_CORE_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_CORE_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_CORE_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_ERAM_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_ERAM_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_ERAM_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_ERAM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_RXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_RXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_RXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_RXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_TXGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_TXGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_TXGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:PCC", "2:UNB2_FPGA_POL_TXGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_HGXB_VOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_HGXB_IOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_HGXB_TEMP_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_HGXB_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_PGM_VOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_VOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_PGM_IOUT_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_IOUT_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - UNB2_FPGA_POL_PGM_TEMP_R = attribute_wrapper(comms_annotation=["2:UNB2_FPGA_POL_PGM_TEMP_R"], datatype=numpy.double, dims=(N_unb,N_fpga)) - - - def delete_device(self): - """Hook to delete resources allocated in init_device. - - This method allows for any memory or other resources allocated in the - init_device method to be released. This method is called by the device - destructor and by the device Init command (a Tango built-in). - """ - self.debug_stream("Shutting down...") - - self.Off() - self.debug_stream("Shut down. Good bye.") - - # -------- - # overloaded functions - # -------- - def off(self): - """ user code here. is called when the state is set to OFF """ - - # Stop keep-alive - self.opcua_connection.stop() - - def initialise(self): - """ user code here. 
is called when the sate is set to INIT """ - """Initialises the attributes and properties of the PCC.""" - - # set up the OPC ua client - self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self) - - # map an access helper class - for i in self.attr_list(): - try: - i.set_comm_client(self.OPCua_client) - except: - self.debug_stream("error in getting APSCTL attribute: {} from client".format(i)) - - self.OPCua_client.start() - - # -------- - # Commands - # -------- - -# ---------- -# Run server -# ---------- -def main(args=None, **kwargs): - """Main function of the SDP module.""" - return run((APSCTL,), args=args, **kwargs) - - -if __name__ == '__main__': - main() - diff --git a/devices/PCC.py b/devices/PCC.py index 96ef1013fa310d517fbf4637d1c423e08c10db15..ff0ed0914f43272ab195b8a55f8f550e7fdd3ceb 100644 --- a/devices/PCC.py +++ b/devices/PCC.py @@ -19,12 +19,12 @@ from tango import AttrWriteType import numpy # Additional import -from src.wrappers import * +from util.wrappers import * from clients.opcua_connection import OPCUAConnection -from src.attribute_wrapper import attribute_wrapper -from src.hardware_device import hardware_device -from src.lofar_logging import device_logging_to_python +from util.attribute_wrapper import attribute_wrapper +from util.hardware_device import hardware_device +from util.lofar_logging import device_logging_to_python, log_exceptions __all__ = ["PCC", "main"] @@ -100,6 +100,7 @@ class PCC(hardware_device): RCU_monitor_rate_RW = attribute_wrapper(comms_annotation=["2:PCC", "2:RCU_monitor_rate_RW"], datatype=numpy.float64, access=AttrWriteType.READ_WRITE) + @log_exceptions() def delete_device(self): """Hook to delete resources allocated in init_device. @@ -115,12 +116,14 @@ class PCC(hardware_device): # -------- # overloaded functions # -------- - def off(self): + @log_exceptions() + def configure_for_off(self): """ user code here. is called when the state is set to OFF """ # Stop keep-alive self.OPCua_client.stop() - def initialise(self): + @log_exceptions() + def configure_for_initialise(self): """ user code here. is called when the state is set to INIT """ # Init the dict that contains function to OPC-UA function mappings. 
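To make the intent of the renamed hooks in this PCC.py hunk concrete: configure_for_initialise() and configure_for_off() are the overridable callbacks the hardware_device base class (further down in this diff) invokes during the INIT and OFF transitions. A rough sketch of a device built on them; the class name and the make_client()/stop() helpers are illustrative, not part of this change:

from util.hardware_device import hardware_device

class my_device(hardware_device):
    def configure_for_initialise(self):
        # runs while the device is in INIT, before it is moved to STANDBY;
        # set up client connections here (make_client is a hypothetical helper)
        self.client = self.make_client()

    def configure_for_off(self):
        # runs when the device is commanded OFF; release resources here
        self.client.stop()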
@@ -146,13 +149,15 @@ class PCC(hardware_device): self.OPCua_client.start() + + # -------- # Commands # -------- @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def RCU_off(self): """ @@ -162,8 +167,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def RCU_on(self): """ @@ -173,8 +178,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def ADC_on(self): """ @@ -184,8 +189,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def RCU_update(self): """ @@ -195,8 +200,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def CLK_off(self): """ @@ -206,8 +211,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def CLK_on(self): """ @@ -217,8 +222,8 @@ class PCC(hardware_device): @command() @DebugIt() - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def CLK_PLL_setup(self): """ diff --git a/devices/SDP.py b/devices/SDP.py index 35c03232e8e0fb42b272f25325f20cad9ba03340..2d14a96fc6f977749a920d7b1414a2febb7fe7b6 100644 --- a/devices/SDP.py +++ b/devices/SDP.py @@ -18,10 +18,10 @@ from tango import AttrWriteType # Additional import from clients.opcua_connection import OPCUAConnection -from src.attribute_wrapper import attribute_wrapper -from src.hardware_device import hardware_device +from util.attribute_wrapper import attribute_wrapper +from util.hardware_device import hardware_device -from src.lofar_logging import device_logging_to_python +from util.lofar_logging import device_logging_to_python, log_exceptions import numpy @@ -72,6 +72,23 @@ class SDP(hardware_device): fpga_version_R = attribute_wrapper(comms_annotation=["1:fpga_version_R"], datatype=numpy.str_, dims=(16,)) fpga_weights_R = attribute_wrapper(comms_annotation=["1:fpga_weights_R"], datatype=numpy.int16, dims=(16, 12 * 488 * 2)) fpga_weights_RW = attribute_wrapper(comms_annotation=["1:fpga_weights_RW"], datatype=numpy.int16, dims=(16, 12 * 488 * 2), access=AttrWriteType.READ_WRITE) + fpga_processing_enable_RW = attribute_wrapper(comms_annotation=["1:fpga_processing_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_processing_enable_R = attribute_wrapper(comms_annotation=["1:fpga_processing_enable_R"], datatype=numpy.bool_, dims=(16,)) + fpga_sst_offload_enable_RW = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_enable_RW"], datatype=numpy.bool_, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sst_offload_enable_R = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_enable_R"], datatype=numpy.bool_, dims=(16,)) + fpga_sst_offload_dest_mac_RW = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_mac_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sst_offload_dest_mac_R = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_mac_R"], datatype=numpy.str_, dims=(16,)) + fpga_sst_offload_dest_ip_RW = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_ip_RW"], datatype=numpy.str_, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sst_offload_dest_ip_R = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_ip_R"], datatype=numpy.str_, dims=(16,)) + fpga_sst_offload_dest_port_RW = 
attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_port_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sst_offload_dest_port_R = attribute_wrapper(comms_annotation=["1:fpga_sst_offload_dest_port_R"], datatype=numpy.uint16, dims=(16,)) + fpga_sdp_info_station_id_RW = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_station_id_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sdp_info_station_id_R = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_station_id_R"], datatype=numpy.uint16, dims=(16,)) + fpga_sdp_info_observation_id_RW = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_observation_id_RW"], datatype=numpy.uint32, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sdp_info_observation_id_R = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_observation_id_R"], datatype=numpy.uint32, dims=(16,)) + fpga_sdp_info_source_id_RW = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_source_id_RW"], datatype=numpy.uint16, dims=(16,), access=AttrWriteType.READ_WRITE) + fpga_sdp_info_source_id_R = attribute_wrapper(comms_annotation=["1:fpga_sdp_info_source_id_R"], datatype=numpy.uint16, dims=(16,)) + tr_busy_R = attribute_wrapper(comms_annotation=["1:tr_busy_R"], datatype=numpy.bool_) # NOTE: typo in node name is 'tr_reload_W' should be 'tr_reload_RW' tr_reload_RW = attribute_wrapper(comms_annotation=["1:tr_reload_W"], datatype=numpy.bool_, access=AttrWriteType.READ_WRITE) @@ -82,6 +99,7 @@ class SDP(hardware_device): """Method always executed before any TANGO command is executed.""" pass + @log_exceptions() def delete_device(self): """Hook to delete resources allocated in init_device. @@ -97,15 +115,17 @@ class SDP(hardware_device): # -------- # overloaded functions # -------- - def off(self): + @log_exceptions() + def configure_for_off(self): """ user code here. is called when the state is set to OFF """ # Stop keep-alive self.opcua_connection.stop() - def initialise(self): + @log_exceptions() + def configure_for_initialise(self): """ user code here. 
is called when the sate is set to INIT """ - """Initialises the attributes and properties of the PCC.""" + """Initialises the attributes and properties of the SDP.""" # set up the OPC ua client self.OPCua_client = OPCUAConnection("opc.tcp://{}:{}/".format(self.OPC_Server_Name, self.OPC_Server_Port), "http://lofar.eu", self.OPC_Time_Out, self.Fault, self) diff --git a/devices/__init__.py b/devices/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e44e9d156619faa3b3623f025a35d9e815fe4445 --- /dev/null +++ b/devices/__init__.py @@ -0,0 +1,3 @@ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions diff --git a/devices/clients/ini_client.py b/devices/clients/ini_client.py new file mode 100644 index 0000000000000000000000000000000000000000..2f4d714b57dd57d327795fe59fd6edf43eb4c9fa --- /dev/null +++ b/devices/clients/ini_client.py @@ -0,0 +1,192 @@ +from util.comms_client import CommClient +import configparser +import numpy + +__all__ = ["ini_client"] + + +numpy_to_ini_dict = { + numpy.int64: int, + numpy.double: float, + numpy.float64: float, + numpy.bool_: bool, + str: str +} + +numpy_to_ini_get_dict = { + numpy.int64: configparser.ConfigParser.getint, + numpy.double: configparser.ConfigParser.getfloat, + numpy.float64: configparser.ConfigParser.getfloat, + numpy.bool_: configparser.ConfigParser.getboolean, + str: str +} + +ini_to_numpy_dict = { + int: numpy.int64, + float: numpy.float64, + bool: numpy.bool_, + str: numpy.str_ +} + +import os + +class ini_client(CommClient): + """ + this class provides an example implementation of a comms_client. + Durirng initialisation it creates a correctly shaped zero filled value. on read that value is returned and on write its modified. + """ + + def start(self): + super().start() + + def __init__(self, filename, fault_func, streams, try_interval=2): + """ + initialises the class and tries to connect to the client. + """ + self.config = configparser.ConfigParser() + self.filename = filename + + super().__init__(fault_func, streams, try_interval) + + # Explicitly connect + if not self.connect(): + # hardware or infra is down -- needs fixing first + fault_func() + return + + def connect(self): + self.config_file = open(self.filename, "r") + + self.connected = True # set connected to true + return True # if successful, return true. otherwise return false + + def disconnect(self): + self.connected = False # always force a reconnect, regardless of a successful disconnect + self.streams.debug_stream("disconnected from the 'client' ") + + def _setup_annotation(self, annotation): + """ + this function gives the client access to the comm client annotation data given to the attribute wrapper. + The annotation data can be used to provide whatever extra data is necessary in order to find/access the monitor/control point. + + the annotation can be in whatever format may be required. 
it is up to the user to handle its content + example annotation may include: + - a file path and file line/location + - COM object path + + Annotations: + name: Required, the name of the ini variable + section: Required, the section of the ini variable + + """ + + # as this is an example, just log the annotation + self.streams.debug_stream("annotation: {}".format(annotation)) + name = annotation.get('name') + if name is None: + raise ValueError("ini client requires a variable `name` in the annotation to set/get") + section = annotation.get('section') + if section is None: + raise ValueError("requires a `section` specified in the annotation to open") + + return section, name + + + def _setup_value_conversion(self, attribute): + """ + gives the client access to the attribute_wrapper object in order to access all + necessary data such as dimensionality and data type + """ + + dim_y = attribute.dim_y + dim_x = attribute.dim_x + + dtype = attribute.numpy_type + + return dim_y, dim_x, dtype + + def _setup_mapping(self, name, section, dtype, dim_y, dim_x): + """ + takes all gathered data to configure and return the correct read and write functions + """ + + def read_function(): + self.config.read_file(self.config_file) + value = self.config.get(section, name) + + value = data_handler(value, dtype) + + if dim_y > 1: + # if data is an image, slice it according to the y dimensions + value = numpy.array(numpy.split(value, indices_or_sections=dim_y)) + + return value + + def write_function(value): + + if type(value) is list: + write_value = ", ".join([str(v) for v in value]) + + else: + write_value = str(value) + + self.config.read_file(self.config_file) + self.config.set(section, name, write_value) + fp = open(self.filename, 'w') + self.config.write(fp) + + return read_function, write_function + + def setup_attribute(self, annotation=None, attribute=None): + """ + MANDATORY function: is used by the attribute wrapper to get read/write functions. + must return the read and write functions + """ + + # process the comms_annotation + section, name = self._setup_annotation(annotation) + + # get all the necessary data to set up the read/write functions from the attribute_wrapper + dim_y, dim_x, dtype = self._setup_value_conversion(attribute) + + # configure and return the read/write functions + read_function, write_function = self._setup_mapping(name, section, dtype, dim_y, dim_x) + + # return the read/write functions + return read_function, write_function + +def data_handler(string, dtype): + value = [] + + if dtype is numpy.bool_: + # Handle special case for Bools + for i in string.split(","): + i = i.strip(" ") + if "True" == i: + value.append(True) + elif "False" == i: + value.append(False) + else: + raise ValueError("String to bool failed.
String is not True/False, but is: '{}'".format(i)) + + value = dtype(value) + + elif dtype is numpy.str_: + for i in string.split(","): + val = numpy.str_(i) + value.append(val) + + value = numpy.array(value) + + else: + # regular case, go through the separator + for i in string.split(","): + i = i.replace(" ", "") + val = dtype(i) + value.append(val) + + + # convert values from buildin type to numpy type + value = dtype(value) + + return value diff --git a/devices/clients/opcua_connection.py b/devices/clients/opcua_connection.py index 85962f4216013af69733eeab145ce2d39e460a5b..f55922df8dba4ca5dbb6c78db5600a7287d5f9ad 100644 --- a/devices/clients/opcua_connection.py +++ b/devices/clients/opcua_connection.py @@ -1,6 +1,6 @@ from threading import Thread import socket -from src.comms_client import CommClient +from util.comms_client import CommClient import numpy import opcua from opcua import Client diff --git a/devices/clients/test_client.py b/devices/clients/test_client.py index b191d991ff85d3a201415224b0ee33572fcbd188..7465a6afca5f189b205748b01480f57a17ede63c 100644 --- a/devices/clients/test_client.py +++ b/devices/clients/test_client.py @@ -1,4 +1,4 @@ -from src.comms_client import CommClient +from util.comms_client import CommClient import numpy import os diff --git a/devices/HW_device_template.py b/devices/examples/HW_device_template.py similarity index 95% rename from devices/HW_device_template.py rename to devices/examples/HW_device_template.py index cd7880672caf41e65db841408353896a795c5549..dd6cad99bd5824bd1b8a37a471aeb7796f32a05d 100644 --- a/devices/HW_device_template.py +++ b/devices/examples/HW_device_template.py @@ -14,8 +14,8 @@ from tango.server import run from tango import AttrWriteType # Additional import -from src.attribute_wrapper import attribute_wrapper -from src.hardware_device import hardware_device +from util.attribute_wrapper import attribute_wrapper +from util.hardware_device import hardware_device __all__ = ["HW_dev"] diff --git a/devices/ini_device.py b/devices/ini_device.py new file mode 100644 index 0000000000000000000000000000000000000000..0b81611830e9ed14e76141ee583a1e6a7ddb8c60 --- /dev/null +++ b/devices/ini_device.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- +# +# This file wraps around a tango device class and provides a number of abstractions useful for hardware devices. It works together +# +# Distributed under the terms of the APACHE license. +# See LICENSE.txt for more info. 
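For the ini_client introduced above, the read path boils down to a configparser lookup keyed by the {"section": ..., "name": ...} annotation, followed by a data_handler-style conversion to the attribute's numpy type. A self-contained sketch of that behaviour; the section and option names mirror the example.ini written by the ini_device below:

import configparser
import numpy

# Simulate reading "int_spectrum_R" from the [spectrum] section of example.ini.
config = configparser.ConfigParser()
config.read_string("[spectrum]\nint_spectrum_R = 1, 2, 3, 4\n")

raw = config.get("spectrum", "int_spectrum_R")
# comma-separated string -> numpy array, as data_handler does for the regular case
value = numpy.array([int(v) for v in raw.split(",")], dtype=numpy.int64)
print(value)  # [1 2 3 4]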
+ +""" + +""" + +# PyTango imports +from tango.server import run +from tango.server import device_property +from tango import AttrWriteType +from tango import DevState +# Additional import +from util.attribute_wrapper import attribute_wrapper +from util.hardware_device import hardware_device + + +import configparser +import numpy + +from clients.ini_client import * + + +__all__ = ["ini_device"] + + +def write_ini_file(filename): + with open(filename, 'w') as configfile: + + config = configparser.ConfigParser() + config['scalar'] = {} + config['scalar']['double_scalar_R'] = '1.2' + config['scalar']['bool_scalar_R'] = 'True' + config['scalar']['int_scalar_R'] = '5' + config['scalar']['str_scalar_R'] = 'this is a test' + + config['spectrum'] = {} + config['spectrum']['double_spectrum_R'] = '1.2, 2.3, 3.4, 4.5' + config['spectrum']['bool_spectrum_R'] = 'True, True, False, False' + config['spectrum']['int_spectrum_R'] = '1, 2, 3, 4' + config['spectrum']['str_spectrum_R'] = '"a", "b", "c", "d"' + + config['image'] = {} + config['image']['double_image_R'] = '1.2, 2.3, 3.4, 4.5, 5.6, 6.7' + config['image']['bool_image_R'] = 'True, True, False, False, True, False' + config['image']['int_image_R'] = '1, 2, 3, 4, 5, 6' + config['image']['str_image_R'] = '"a", "b", "c", "d", "e", "f"' + + config.write(configfile) + + + +class ini_device(hardware_device): + """ + This class is the minimal (read empty) implementation of a class using 'hardware_device' + """ + + # ---------- + # Attributes + # ---------- + """ + attribute wrapper objects can be declared here. All attribute wrapper objects will get automatically put in a list (attr_list) for easy access + + example = attribute_wrapper(comms_annotation="this is an example", datatype=numpy.double, dims=(8, 2), access=AttrWriteType.READ_WRITE) + ... 
+ + """ + double_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "double_scalar_RW"}, datatype=numpy.double, access=AttrWriteType.READ_WRITE) + double_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "double_scalar_R"}, datatype=numpy.double) + bool_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_RW"}, datatype=numpy.bool_, access=AttrWriteType.READ_WRITE) + bool_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "bool_scalar_R"}, datatype=numpy.bool_) + int_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_RW"}, datatype=numpy.int64, access=AttrWriteType.READ_WRITE) + int_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "int_scalar_R"}, datatype=numpy.int64) + str_scalar_RW = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_RW"}, datatype=numpy.str_, access=AttrWriteType.READ_WRITE) + str_scalar_R = attribute_wrapper(comms_annotation={"section": "scalar", "name": "str_scalar_R"}, datatype=numpy.str_) + + double_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_RW"}, datatype=numpy.double, dims=(4,), access=AttrWriteType.READ_WRITE) + double_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "double_spectrum_R"}, datatype=numpy.double, dims=(4,)) + bool_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_RW"}, datatype=numpy.bool_, dims=(4,), access=AttrWriteType.READ_WRITE) + bool_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "bool_spectrum_R"}, datatype=numpy.bool_, dims=(4,)) + int_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_RW"}, datatype=numpy.int64, dims=(4,), access=AttrWriteType.READ_WRITE) + int_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "int_spectrum_R"}, datatype=numpy.int64, dims=(4,)) + str_spectrum_RW = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_RW"}, datatype=numpy.str_, dims=(4,), access=AttrWriteType.READ_WRITE) + str_spectrum_R = attribute_wrapper(comms_annotation={"section": "spectrum", "name": "str_spectrum_R"}, datatype=numpy.str_, dims=(4,)) + + double_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_RW"}, datatype=numpy.double, dims=(3, 2), access=AttrWriteType.READ_WRITE) + double_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "double_image_R"}, datatype=numpy.double, dims=(3, 2)) + bool_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "bool_image_RW"}, datatype=numpy.bool_, dims=(3, 2), access=AttrWriteType.READ_WRITE) + bool_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "bool_image_R"}, datatype=numpy.bool_, dims=(3, 2)) + int_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_RW"}, datatype=numpy.int64, dims=(3, 2), access=AttrWriteType.READ_WRITE) + int_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": "int_image_R"}, datatype=numpy.int64, dims=(3, 2)) + str_image_RW = attribute_wrapper(comms_annotation={"section": "image", "name": "str_image_RW"}, datatype=numpy.str_, dims=(3, 2), access=AttrWriteType.READ_WRITE) + str_image_R = attribute_wrapper(comms_annotation={"section": "image", "name": 
"str_image_R"}, datatype=numpy.str_, dims=(3, 2)) + + # -------- + # overloaded functions + # -------- + def configure_for_initialise(self): + """ user code here. is called when the sate is set to INIT """ + """Initialises the attributes and properties of the PCC.""" + + # set up the OPC ua client + self.ini_client = ini_client("example.ini", self.Fault, self) + + # map an access helper class + for i in self.attr_list(): + i.set_comm_client(self.ini_client) + + self.ini_client.start() + + +# ---------- +# Run server +# ---------- +def main(args=None, **kwargs): + write_ini_file("example.ini") + + + """Main function of the hardware device module.""" + return run((ini_device,), args=args, **kwargs) + + +if __name__ == '__main__': + main() diff --git a/devices/setup.cfg b/devices/setup.cfg new file mode 100644 index 0000000000000000000000000000000000000000..c67fdf70f29eb8e2c5342e60876a097f4213ccdd --- /dev/null +++ b/devices/setup.cfg @@ -0,0 +1,13 @@ + +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + diff --git a/devices/test_device.py b/devices/test_device.py index 38f688cc0cb715667b8237c4358f32024ced582a..6a62907112ea1cf081436285aa0d21532ba24d0a 100644 --- a/devices/test_device.py +++ b/devices/test_device.py @@ -17,8 +17,8 @@ from tango import DevState # Additional import from clients.test_client import test_client -from src.attribute_wrapper import * -from src.hardware_device import * +from util.attribute_wrapper import * +from util.hardware_device import * __all__ = ["test_device", "main"] @@ -64,7 +64,7 @@ class test_device(hardware_device): # -------- # overloaded functions # -------- - def initialise(self): + def configure_for_initialise(self): """ user code here. 
is called when the sate is set to INIT """ """Initialises the attributes and properties of the PCC.""" diff --git a/lib/archiver.py b/devices/util/archiver.py old mode 100755 new mode 100644 similarity index 100% rename from lib/archiver.py rename to devices/util/archiver.py diff --git a/devices/src/attribute_wrapper.py b/devices/util/attribute_wrapper.py similarity index 95% rename from devices/src/attribute_wrapper.py rename to devices/util/attribute_wrapper.py index 98faf037c363fa92006bc656f821d636b5b396c0..b27187cda57a7d7238a677a085867011ad0ccb23 100644 --- a/devices/src/attribute_wrapper.py +++ b/devices/util/attribute_wrapper.py @@ -3,7 +3,7 @@ from tango import AttrWriteType import numpy -from src.wrappers import only_when_on, fault_on_error +from util.wrappers import only_when_on, fault_on_error import logging logger = logging.getLogger() @@ -61,8 +61,8 @@ class attribute_wrapper(attribute): if access == AttrWriteType.READ_WRITE: """ if the attribute is of READ_WRITE type, assign the RW and write function to it""" - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def read_RW(device): # print("read_RW {}, {}x{}, {}, {}".format(me.name, me.dim_x, me.dim_y, me.attr_type, me.value)) """ @@ -74,8 +74,8 @@ class attribute_wrapper(attribute): raise Exception("Attribute read_RW function error, attempted to read value_dict with key: `%s`, are you sure this exists?", self) from e - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def write_RW(device, value): """ _write_RW writes a value to this attribute @@ -91,8 +91,8 @@ class attribute_wrapper(attribute): else: """ if the attribute is of READ type, assign the read function to it""" - @only_when_on - @fault_on_error + @only_when_on() + @fault_on_error() def read_R(device): """ _read_R reads the attribute value, stores it and returns it" diff --git a/devices/src/comms_client.py b/devices/util/comms_client.py similarity index 100% rename from devices/src/comms_client.py rename to devices/util/comms_client.py diff --git a/lib/get_internal_attribute_history.py b/devices/util/get_internal_attribute_history.py old mode 100755 new mode 100644 similarity index 100% rename from lib/get_internal_attribute_history.py rename to devices/util/get_internal_attribute_history.py diff --git a/devices/src/hardware_device.py b/devices/util/hardware_device.py similarity index 89% rename from devices/src/hardware_device.py rename to devices/util/hardware_device.py index 431cbe9718fb3b586eea9a23aaa65aa1134fa59d..e0c9154c703a7cb82c42e9cdd7db76d68a011e05 100644 --- a/devices/src/hardware_device.py +++ b/devices/util/hardware_device.py @@ -16,12 +16,12 @@ from tango.server import Device, command from tango import DevState, DebugIt # Additional import -from src.attribute_wrapper import attribute_wrapper -from src.lofar_logging import log_exceptions +from util.attribute_wrapper import attribute_wrapper +from util.lofar_logging import log_exceptions __all__ = ["hardware_device"] -from src.wrappers import only_in_states +from util.wrappers import only_in_states, fault_on_error #@log_exceptions() class hardware_device(Device): @@ -74,6 +74,8 @@ class hardware_device(Device): @command() @only_in_states([DevState.FAULT, DevState.OFF]) @DebugIt() + @fault_on_error() + @log_exceptions() def Initialise(self): """ Command to ask for initialisation of this device. Can only be called in FAULT or OFF state. 
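The Initialise command shown here (now additionally wrapped in fault_on_error and log_exceptions) is the entry point of the OFF/FAULT -> INIT -> STANDBY sequence, with On() taking the device to ON afterwards. A sketch of how a client drives that sequence; the device name is taken from the CDB file at the top of this diff and a running Tango facility is assumed:

from tango import DeviceProxy

proxy = DeviceProxy("LTS/example_device/1")
proxy.Initialise()    # OFF/FAULT -> INIT, runs configure_for_initialise(), ends in STANDBY
proxy.On()            # STANDBY -> ON, runs configure_for_on()
print(proxy.state())  # expect ON if both transitions succeeded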
@@ -83,24 +85,27 @@ class hardware_device(Device): self.set_state(DevState.INIT) self.setup_value_dict() - self.initialise() + self.configure_for_initialise() self.set_state(DevState.STANDBY) @command() @only_in_states([DevState.STANDBY]) @DebugIt() + @fault_on_error() + @log_exceptions() def On(self): """ Command to ask for initialisation of this device. Can only be called in FAULT or OFF state. :return:None """ - self.on() + self.configure_for_on() self.set_state(DevState.ON) @command() @DebugIt() + @log_exceptions() def Off(self): """ Command to ask for shutdown of this device. @@ -114,7 +119,7 @@ class hardware_device(Device): # Turn off self.set_state(DevState.OFF) - self.off() + self.configure_for_off() # Turn off again, in case of race conditions through reconnecting self.set_state(DevState.OFF) @@ -122,6 +127,7 @@ class hardware_device(Device): @command() @only_in_states([DevState.ON, DevState.INIT, DevState.STANDBY]) @DebugIt() + @log_exceptions() def Fault(self): """ FAULT state is used to indicate our connection with the OPC-UA server is down. @@ -132,18 +138,18 @@ class hardware_device(Device): :return:None """ - self.fault() + self.configure_for_fault() self.set_state(DevState.FAULT) # functions that can be overloaded - def fault(self): + def configure_for_fault(self): pass - def off(self): + def configure_for_off(self): pass - def on(self): + def configure_for_on(self): pass - def initialise(self): + def configure_for_initialise(self): pass def always_executed_hook(self): diff --git a/lib/lofar2_config.py b/devices/util/lofar2_config.py old mode 100755 new mode 100644 similarity index 100% rename from lib/lofar2_config.py rename to devices/util/lofar2_config.py diff --git a/devices/src/lofar_logging.py b/devices/util/lofar_logging.py similarity index 98% rename from devices/src/lofar_logging.py rename to devices/util/lofar_logging.py index aa7d3633138679c63fd1934cf8d5638df7b1cedf..4bedad018047614c16d65b75816ac12dc7dbd7d0 100644 --- a/devices/src/lofar_logging.py +++ b/devices/util/lofar_logging.py @@ -1,5 +1,6 @@ import logging from functools import wraps +import sys # Always also log the hostname because it makes the origin of the log clear. 
import socket @@ -14,7 +15,7 @@ def configure_logger(logger: logging.Logger, log_extra=None): # log to the tcp_input of logstash in our ELK stack handler = AsynchronousLogstashHandler("elk", 5959, database_path='pending_log_messages.db') - # configure log messages + # configure log messages formatter = LogstashFormatter(extra=log_extra, tags=["python", "lofar"]) handler.setFormatter(formatter) diff --git a/lib/startup.py b/devices/util/startup.py old mode 100755 new mode 100644 similarity index 99% rename from lib/startup.py rename to devices/util/startup.py index f98097f994afc340fdb168311bcb524445658f1d..0f4bcbe702b1bd1edb873234763d56455b6009b4 --- a/lib/startup.py +++ b/devices/util/startup.py @@ -34,4 +34,3 @@ def startup(device: str, force_restart: bool): else: print("Device {} has successfully reached ON state.".format(device)) return proxy - diff --git a/devices/src/wrappers.py b/devices/util/wrappers.py similarity index 64% rename from devices/src/wrappers.py rename to devices/util/wrappers.py index 9dbc45a68dc850b36bd30a0a5b8664d104b58e30..5300ba2a06380599a3457c1624344243b7f5db60 100644 --- a/devices/src/wrappers.py +++ b/devices/util/wrappers.py @@ -22,32 +22,38 @@ def only_in_states(allowed_states): return wrapper -def only_when_on(func): +def only_when_on(): """ Wrapper to call and return the wrapped function if the device is in the ON state. Otherwise None is returned and nothing will be called. """ - @wraps(func) - def when_on_wrapper(self, *args, **kwargs): - if self.get_state() == DevState.ON: - return func(self, *args, **kwargs) + def inner(func): + @wraps(func) + def when_on_wrapper(self, *args, **kwargs): + if self.get_state() == DevState.ON: + return func(self, *args, **kwargs) + + return None - return None + return when_on_wrapper - return when_on_wrapper + return inner -def fault_on_error(func): +def fault_on_error(): """ Wrapper to catch exceptions. Sets the device in a FAULT state if any occurs. """ - @wraps(func) - def error_wrapper(self, *args, **kwargs): - try: - return func(self, *args, **kwargs) - except Exception as e: - self.error_stream("Function failed. Trace: %s", traceback.format_exc()) - self.Fault() - return None + def inner(func): + @wraps(func) + def error_wrapper(self, *args, **kwargs): + try: + return func(self, *args, **kwargs) + except Exception as e: + self.error_stream("Function failed. Trace: %s", traceback.format_exc()) + self.Fault() + return None + + return error_wrapper - return error_wrapper + return inner diff --git a/devices/versioneer.py b/devices/versioneer.py new file mode 100644 index 0000000000000000000000000000000000000000..1040c218924c06a246ea1bd872201a5c57744192 --- /dev/null +++ b/devices/versioneer.py @@ -0,0 +1,1855 @@ + +# Version: 0.19 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/python-versioneer/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. 
+ + +## Quick Install + +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) +* run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes). + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. + +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. 
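The setup.py side of this wiring is not part of this diff; a hedged sketch of versioneer's standard integration, which the setup.cfg added above is meant to configure, typically looks like:

import versioneer
from setuptools import setup

setup(
    name="devices",                        # package name here is illustrative
    version=versioneer.get_version(),      # version string resolved from git tags
    cmdclass=versioneer.get_cmdclass(),    # version-aware build/sdist/develop commands
)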
+ +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. + creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). + +## Known Limitations + +Some situations are known to cause problems for Versioneer. This details the +most significant ones. More can be found on Github +[issues page](https://github.com/python-versioneer/python-versioneer/issues). + +### Subprojects + +Versioneer has limited support for source trees in which `setup.py` is not in +the root directory (e.g. `setup.py` and `.git/` are *not* siblings). 
The are +two common reasons why `setup.py` might not be in the root: + +* Source trees which contain multiple subprojects, such as + [Buildbot](https://github.com/buildbot/buildbot), which contains both + "master" and "slave" subprojects, each with their own `setup.py`, + `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI + distributions (and upload multiple independently-installable tarballs). +* Source trees whose main purpose is to contain a C library, but which also + provide bindings to Python (and perhaps other languages) in subdirectories. + +Versioneer will look for `.git` in parent directories, and most operations +should get the right version string. However `pip` and `setuptools` have bugs +and implementation details which frequently cause `pip install .` from a +subproject directory to fail to find a correct version string (so it usually +defaults to `0+unknown`). + +`pip install --editable .` should work correctly. `setup.py install` might +work too. + +Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in +some later version. + +[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking +this issue. The discussion in +[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the +issue from the Versioneer side in more detail. +[pip PR#3176](https://github.com/pypa/pip/pull/3176) and +[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve +pip to let Versioneer work correctly. + +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . 
The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + +""" + +import configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . + """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ("Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND').") + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. 
+ me = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(me)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print("Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(me), versioneer_py)) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . + setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.ConfigParser() + with open(setup_cfg, "r") as f: + parser.read_file(f) + VCS = parser.get("versioneer", "VCS") # mandatory + + def get(parser, name): + if parser.has_option("versioneer", name): + return parser.get("versioneer", name) + return None + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = get(parser, "style") or "" + cfg.versionfile_source = get(parser, "versionfile_source") + cfg.versionfile_build = get(parser, "versionfile_build") + cfg.tag_prefix = get(parser, "tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = get(parser, "parentdir_prefix") + cfg.verbose = get(parser, "verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = p.communicate()[0].strip().decode() + if p.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, p.returncode + return stdout, p.returncode + + +LONG_VERSION_PY['git'] = r''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
Generated by +# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + p = None + for c in commands: + try: + dispcmd = str([c] + args) + # remember shell=False, so use git.cmd on windows, not just git + p = subprocess.Popen([c] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" %% (commands,)) + return None, None + stdout = p.communicate()[0].strip().decode() + if p.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, p.returncode + return stdout, p.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
+ tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], + cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post0.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post0.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post0.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. + + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. 
+ for i in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. + keywords = {} + try: + f = open(versionfile_abs, "r") + for line in f.readlines(): + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + f.close() + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if not keywords: + raise NotThisMethod("no keywords at all, weird") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = set([r.strip() for r in refnames.strip("()").split(",")]) + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". 
+ tags = set([r for r in refs if re.search(r'\d', r)]) + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + if verbose: + print("picking %s" % r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%s*" % tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? 
+ pieces["error"] = ("unable to parse git-describe output: '%s'" + % describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" + % (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], + cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], + cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + me = __file__ + if me.endswith(".pyc") or me.endswith(".pyo"): + me = os.path.splitext(me)[0] + ".py" + versioneer_file = os.path.relpath(me) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + f = open(".gitattributes", "r") + for line in f.readlines(): + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + f.close() + except EnvironmentError: + pass + if not present: + f = open(".gitattributes", "a+") + f.write("%s export-subst\n" % versionfile_source) + f.close() + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for i in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + else: + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %s but none started with prefix %s" % + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.19) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. 
+ +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", + contents, re.M | re.S) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, + indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post0.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post0.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. + """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert cfg.versionfile_source is not None, \ + "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. 
+ + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + print("unable to compute version") + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, "error": "unable to compute version", + "date": None} + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. + # Also see https://github.com/python-versioneer/python-versioneer/issues/52 + + cmds = {} if cmdclass is None else cmdclass.copy() + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? 
+ # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? + + # we override different "build_py" commands for both environments + if 'build_py' in cmds: + _build_py = cmds['build_py'] + elif "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_py"] = cmd_build_py + + if "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join(self.build_lib, + cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + cmds["build_ext"] = cmd_build_ext + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. + # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if 'py2exe' in sys.modules: # py2exe enabled? 
+ from py2exe.distutils_buildexe import py2exe as _py2exe + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % + {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if 'sdist' in cmds: + _sdist = cmds['sdist'] + elif "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, + self._versioneer_generated_versions) + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. 
+ +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +INIT_PY_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + + +def do_setup(): + """Do main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except (EnvironmentError, configparser.NoSectionError, + configparser.NoOptionError) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", + file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write(LONG % {"DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + }) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), + "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + if INIT_PY_SNIPPET not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(INIT_PY_SNIPPET) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. + manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print(" appending versionfile_source ('%s') to MANIFEST.in" % + cfg.versionfile_source) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. 
+ do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1) diff --git a/docker-compose/archiver.yml b/docker-compose/archiver.yml index 38245d4933515d92ec2da41167f2404453e3a957..f471de6285a23e9d4969b4d840e9b83accaac22e 100644 --- a/docker-compose/archiver.yml +++ b/docker-compose/archiver.yml @@ -1,7 +1,7 @@ version: '2' services: - maria-db: + archiver-maria-db: image: ${DOCKER_REGISTRY_HOST}/${DOCKER_REGISTRY_USER}/mariadb_hdbpp:latest container_name: archiver-maria-db network_mode: ${NETWORK_MODE} @@ -23,7 +23,7 @@ services: depends_on: - databaseds - dsconfig - - maria-db + - archiver-maria-db environment: - TANGO_HOST=${TANGO_HOST} - HdbManager=archiving/hdbpp/confmanager01 @@ -40,7 +40,7 @@ services: depends_on: - databaseds - dsconfig - - maria-db + - archiver-maria-db environment: - TANGO_HOST=${TANGO_HOST} - HdbManager=archiving/hdbpp/confmanager01 diff --git a/docker-compose/hdbpp_viewer.yml b/docker-compose/hdbpp_viewer.yml index cf65548b25fca787e56a3ab03333daa09c12aa5b..7add56d90c4ced139c03df5d8a455b3db7132aab 100644 --- a/docker-compose/hdbpp_viewer.yml +++ b/docker-compose/hdbpp_viewer.yml @@ -16,7 +16,7 @@ services: depends_on: - databaseds - dsconfig - - maria-db + - archiver-maria-db - hdbpp-es - hdbpp-cm volumes: diff --git a/docker-compose/jupyter/Dockerfile b/docker-compose/jupyter/Dockerfile index 97ef7ca63daa60331ae0e8dee8f5d70fa143be44..da3f1da1c5bc09fa4a1861377d2533faca70535a 100644 --- a/docker-compose/jupyter/Dockerfile +++ b/docker-compose/jupyter/Dockerfile @@ -13,7 +13,7 @@ RUN sudo jupyter nbextension enable jupyter_bokeh --py --sys-prefix # Install profiles for ipython & jupyter COPY ipython-profiles /opt/ipython-profiles/ -RUN sudo chown tango.tango -R /opt/ipython-profiles +RUN sudo chmod a+rw -R /opt/ipython-profiles COPY jupyter-kernels /usr/local/share/jupyter/kernels/ # Install patched jupyter executable @@ -27,5 +27,6 @@ ADD https://github.com/krallin/tini/releases/download/${TINI_VERSION}/tini /usr/ RUN sudo chmod +x /usr/bin/tini # Make sure Jupyter can write to the home directory -ENV HOME=/home/tango -RUN chmod a+rwx /home/tango +ENV HOME=/home/user +RUN sudo mkdir -p $HOME +RUN sudo chmod a+rwx $HOME diff --git a/jupyter-notebooks/ini_device.ipynb 
b/jupyter-notebooks/ini_device.ipynb new file mode 100644 index 0000000000000000000000000000000000000000..ba365f263ca35e627b0430f26a02d53af059333a --- /dev/null +++ b/jupyter-notebooks/ini_device.ipynb @@ -0,0 +1,238 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 128, + "id": "waiting-chance", + "metadata": {}, + "outputs": [], + "source": [ + "import time\n", + "import numpy" + ] + }, + { + "cell_type": "code", + "execution_count": 146, + "id": "moving-alexandria", + "metadata": {}, + "outputs": [], + "source": [ + "d=DeviceProxy(\"LTS/ini_device/1\")" + ] + }, + { + "cell_type": "code", + "execution_count": 198, + "id": "ranking-aluminum", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "Device is now in on state\n" + ] + } + ], + "source": [ + "state = str(d.state())\n", + "\n", + "if state == \"OFF\":\n", + " d.initialise()\n", + " time.sleep(1)\n", + "state = str(d.state())\n", + "if state == \"STANDBY\":\n", + " d.on()\n", + "state = str(d.state())\n", + "if state == \"ON\":\n", + " print(\"Device is now in on state\")\n" + ] + }, + { + "cell_type": "code", + "execution_count": 199, + "id": "beneficial-evidence", + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "double_scalar_RW [0.]\n", + "double_scalar_R [1.2]\n", + "bool_scalar_RW [False]\n", + "bool_scalar_R [ True]\n", + "int_scalar_RW [0]\n", + "int_scalar_R [5]\n", + "str_scalar_RW ('',)\n", + "str_scalar_R ('this is',)\n", + "double_spectrum_RW [0. 0. 0. 0.]\n", + "double_spectrum_R [1.2 2.3 3.4 4.5]\n", + "bool_spectrum_RW [False False False False]\n", + "bool_spectrum_R [ True True False False]\n", + "int_spectrum_RW [0 0 0 0]\n", + "int_spectrum_R [1 2 3 4]\n", + "str_spectrum_RW ('', '', '', '')\n", + "str_spectrum_R ('\"a\"', ' \"b\"', ' \"c\"', ' \"d\"')\n", + "double_image_RW [[0. 0. 0.]\n", + " [0. 0. 
0.]]\n", + "double_image_R [[1.2 2.3 3.4]\n", + " [4.5 5.6 6.7]]\n", + "bool_image_RW [[False False False]\n", + " [False False False]]\n", + "bool_image_R [[ True True False]\n", + " [False True False]]\n", + "int_image_RW [[0 0 0]\n", + " [0 0 0]]\n", + "int_image_R [[1 2 3]\n", + " [4 5 6]]\n", + "str_image_RW (('', '', ''), ('', '', ''))\n", + "str_image_R (('\"a\"', ' \"b\"', ' \"c\"'), (' \"d\"', ' \"e\"', ' \"f\"'))\n", + "State <function __get_command_func.<locals>.f at 0x7f3efee95c80>\n", + "Status <function __get_command_func.<locals>.f at 0x7f3efee95c80>\n" + ] + } + ], + "source": [ + "attr_names = d.get_attribute_list()\n", + "\n", + "for i in attr_names:\n", + " try:\n", + " exec(\"print(i, d.{})\".format(i))\n", + " except:\n", + " pass\n" + ] + }, + { + "cell_type": "code", + "execution_count": 93, + "id": "sharing-mechanics", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "array([0])" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "d.int_scalar_RW" + ] + }, + { + "cell_type": "code", + "execution_count": 203, + "id": "2f03759a", + "metadata": {}, + "outputs": [], + "source": [ + "d.str_image_RW = [[\"1\", \"2\", \"3\"],[\"4\", \"5\", \"6\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 204, + "id": "3187f3bb", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(('1', '2', '3'), ('4', '5', '6'))" + ] + }, + "execution_count": 204, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "d.str_image_RW" + ] + }, + { + "cell_type": "code", + "execution_count": 192, + "id": "eb406dce", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "\"['a', 'b', 'c', 'd', 'e', 'f']\"" + ] + }, + "execution_count": 192, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "numpy.str_([\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 197, + "id": "7b270085", + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "6" + ] + }, + "execution_count": 197, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "array = []\n", + "string = '\"a\", \"b\", \"c\", \"d\", \"e\", \"f\"'\n", + "\n", + "for i in string.split(\",\"):\n", + " value = numpy.str_(i)\n", + " array.append(value)\n", + "\n", + "len(array)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "id": "69ecc437", + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "StationControl", + "language": "python", + "name": "stationcontrol" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.7.3" + } + }, + "nbformat": 4, + "nbformat_minor": 5 +}
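The notebook added above dumps every attribute of `LTS/ini_device/1` by building `exec()` strings inside a bare `try/except`. As a side note, a minimal sketch (not part of this patch; it assumes a running Tango device registered as `LTS/ini_device/1` and uses only standard PyTango calls) of the same attribute dump without `exec`, so read failures are reported instead of silently swallowed:

    from tango import DeviceProxy, DevFailed

    d = DeviceProxy("LTS/ini_device/1")

    # Read each attribute by name and print its value; report failures per attribute.
    for name in d.get_attribute_list():
        try:
            print(name, d.read_attribute(name).value)
        except DevFailed as e:
            print(name, "read failed:", e.args[0].desc)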