From b671247041f780ff0a097f1d23d634e843f9206b Mon Sep 17 00:00:00 2001
From: Documentation Bot
Date: Fri, 27 Oct 2017 10:53:38 +0000
Subject: [PATCH] Generated gh-pages for commit 0cd61ac0

Author: William H.P. Nielsen
add debugging and make the final write more robust (#822)
---
 _modules/qcodes/data/data_set.html            | 29 +++++++++-----
 _modules/qcodes/data/format.html              |  8 +++-
 _modules/qcodes/data/gnuplot_format.html      | 23 +++++++++--
 _modules/qcodes/loops.html                    | 38 +++++++++++++------
 api/generated/qcodes.DataSet.html             |  2 +-
 ...codes.instrument_drivers.signal_hound.html |  4 +-
 .../qcodes.instrument_drivers.tektronix.html  |  4 +-
 7 files changed, 76 insertions(+), 32 deletions(-)

diff --git a/_modules/qcodes/data/data_set.html b/_modules/qcodes/data/data_set.html
index 740f77fb498..b0677abaa25 100644
--- a/_modules/qcodes/data/data_set.html
+++ b/_modules/qcodes/data/data_set.html
@@ -176,6 +176,8 @@


 from .location import FormatLocation
 from qcodes.utils.helpers import DelegateAttributes, full_class, deep_update
 
+log = logging.getLogger(__name__)
+
 
 
 [docs]def new_data(location=None, loc_record=None, name=None, overwrite=False, io=None, **kwargs):
@@ -411,14 +413,14 @@


         Args:
             delay (float): seconds between iterations. Default 1.5
         """
-        logging.info(
+        log.info(
             'waiting for DataSet <{}> to complete'.format(self.location))
 
         failing = {key: False for key in self.background_functions}
 
         completed = False
         while True:
-            logging.info('DataSet: {:.0f}% complete'.format(
+            log.info('DataSet: {:.0f}% complete'.format(
                 self.fraction_complete() * 100))
 
             # first check if we're done
@@ -429,13 +431,13 @@ 


             # because we want things like live plotting to get the final data
             for key, fn in list(self.background_functions.items()):
                 try:
-                    logging.debug('calling {}: {}'.format(key, repr(fn)))
+                    log.debug('calling {}: {}'.format(key, repr(fn)))
                     fn()
                     failing[key] = False
                 except Exception:
-                    logging.info(format_exc())
+                    log.info(format_exc())
                     if failing[key]:
-                        logging.warning(
+                        log.warning(
                             'background function {} failed twice in a row, '
                             'removing it'.format(key))
                         del self.background_functions[key]
@@ -447,7 +449,7 @@ 


             # but only sleep if we're not already finished
             time.sleep(delay)
 
-        logging.info('DataSet <{}> is complete'.format(self.location))
+        log.info('DataSet <{}> is complete'.format(self.location))
 
     def get_changes(self, synced_indices):
         """
@@ -552,8 +554,11 @@ 


         self.last_store = time.time()
         if (self.write_period is not None and
                 time.time() > self.last_write + self.write_period):
+            log.debug('Attempting to write')
             self.write()
             self.last_write = time.time()
+        else:
+            log.debug('.store method: This is not the right time to write')
 
     def default_parameter_name(self, paramname='amplitude'):
         """ Return name of default parameter for plotting
@@ -629,7 +634,7 @@ 


             return
         self.formatter.read_metadata(self)
 
-    def write(self, write_metadata=False):
+    def write(self, write_metadata=False, only_complete=True):
         """
         Writes updates to the DataSet to storage.
         N.B. it is recommended to call data_set.finalize() when a DataSet is
@@ -637,6 +642,9 @@ 


 
         Args:
             write_metadata (bool): write the metadata to disk
+            only_complete (bool): passed on to match_save_range inside
+                self.formatter.write. If False, all new data is saved, even
+                when some columns are still incomplete; default is True.
         """
         if self.location is False:
             return
@@ -644,7 +652,8 @@ 


         self.formatter.write(self,
                              self.io,
                              self.location,
-                             write_metadata=write_metadata)
+                             write_metadata=write_metadata,
+                             only_complete=only_complete)
 
     def write_copy(self, path=None, io_manager=None, location=None):
         """
@@ -725,7 +734,9 @@ 


         Also closes the data file(s), if the ``Formatter`` we're using
         supports that.
         """
-        self.write()
+        log.debug('Finalising the DataSet. Writing.')
+        # write all new data, not only complete columns
+        self.write(only_complete=False)
 
         if hasattr(self.formatter, 'close_file'):
             self.formatter.close_file(self)
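
For orientation, a minimal usage sketch of the behaviour added in this file, assuming a DataSet created through new_data as defined above (the name 'demo' is illustrative and no measured arrays are attached):

import qcodes.data.data_set as dsmod

# An (empty, purely illustrative) DataSet; new_data is the factory shown above.
data = dsmod.new_data(name='demo')

# During a measurement, store() only flushes once write_period has elapsed,
# and write() defaults to only_complete=True, i.e. only complete rows go to disk.
data.write()

# finalize() now logs, forces out everything including partially filled rows
# (it calls write(only_complete=False) internally) and closes the file(s).
data.finalize()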
diff --git a/_modules/qcodes/data/format.html b/_modules/qcodes/data/format.html
index 87b0acdcbd2..95ce64fd6bb 100644
--- a/_modules/qcodes/data/format.html
+++ b/_modules/qcodes/data/format.html
@@ -213,7 +213,7 @@ 


     ArrayGroup = namedtuple('ArrayGroup', 'shape set_arrays data name')
 
     def write(self, data_set, io_manager, location, write_metadata=True,
-              force_write=False):
+              force_write=False, only_complete=True):
         """
         Write the DataSet to storage.
 
@@ -228,6 +228,8 @@ 


             location (str): the file location within the io_manager.
             write_metadata (bool): if True, then the metadata is written to disk
             force_write (bool): if True, then the data is written to disk
+            only_complete (bool): Used only by the gnuplot formatter's
+                overridden version of this method
         """
         raise NotImplementedError
 
@@ -355,7 +357,9 @@ 


 
         Returns:
             Tuple(int, int): the first and last raveled indices that should
-                be saved.
+                be saved. Returns None if:
+                    * no data is present
+                    * no new data can be found
         """
         inner_setpoint = group.set_arrays[-1]
         full_dim_data = (inner_setpoint, ) + group.data
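
The raveled indices mentioned in the Returns section are positions in the row-major flattening of the setpoint grid; a small generic numpy sketch (not qcodes internals, with an arbitrary example shape) of how such a range maps back to grid indices:

import numpy as np

shape = (3, 4)      # (outer setpoints, inner setpoints); arbitrary example
first, last = 4, 6  # pretend match_save_range reported these rows as new

for i in range(first, last + 1):          # inclusive on both ends
    indices = np.unravel_index(i, shape)  # 4 -> (1, 0), 5 -> (1, 1), 6 -> (1, 2)
    print(i, indices)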
diff --git a/_modules/qcodes/data/gnuplot_format.html b/_modules/qcodes/data/gnuplot_format.html
index bba5e0c9ddb..26bf939a0ab 100644
--- a/_modules/qcodes/data/gnuplot_format.html
+++ b/_modules/qcodes/data/gnuplot_format.html
@@ -167,12 +167,16 @@ 


 import re
 import math
 import json
+import logging
 
 from qcodes.utils.helpers import deep_update, NumpyJSONEncoder
 from .data_array import DataArray
 from .format import Formatter
 
 
+log = logging.getLogger(__name__)
+
+
 
 [docs]class GNUPlotFormat(Formatter):
     """
     Saves data in one or more gnuplot-format files. We make one file for
@@ -404,7 +408,8 @@


             parts = re.split('"\s+"', labelstr[1:-1])
             return [l.replace('\\"', '"').replace('\\\\', '\\') for l in parts]
 
-    def write(self, data_set, io_manager, location, force_write=False, write_metadata=True):
+    def write(self, data_set, io_manager, location, force_write=False,
+              write_metadata=True, only_complete=True):
         """
         Write updates in this DataSet to storage.
 
@@ -414,6 +419,11 @@ 


             data_set (DataSet): the data we're storing
             io_manager (io_manager): the base location to write to
             location (str): the file location within io_manager
+            only_complete (bool): passed to match_save_range. If True, only
+                complete rows are written; if False, all available new data
+                is written. Set to False when the DataSet is finalised, so
+                that everything gets written even if some data arrays are
+                still incomplete (e.g. full of NaNs).
         """
         arrays = data_set.arrays
 
@@ -422,16 +432,20 @@ 


         existing_files = set(io_manager.list(location))
         written_files = set()
 
-        # Every group gets it's own datafile
+        # Every group gets its own datafile
         for group in groups:
+            log.debug('Attempting to write the following '
+                      'group: {}'.format(group))
             fn = io_manager.join(location, group.name + self.extension)
 
             written_files.add(fn)
 
             file_exists = fn in existing_files
-            save_range = self.match_save_range(group, file_exists)
+            save_range = self.match_save_range(group, file_exists,
+                                               only_complete=only_complete)
 
             if save_range is None:
+                log.debug('Cannot match save range, skipping this group.')
                 continue
 
             overwrite = save_range[0] == 0 or force_write
@@ -441,6 +455,7 @@ 


             with io_manager.open(fn, open_mode) as f:
                 if overwrite:
                     f.write(self._make_header(group))
+                    log.debug('Wrote header to file')
 
                 for i in range(save_range[0], save_range[1] + 1):
                     indices = np.unravel_index(i, shape)
@@ -456,7 +471,7 @@ 


 
                     one_point = self._data_point(group, indices)
                     f.write(self.separator.join(one_point) + self.terminator)
-
+                    log.debug('Wrote to file')
             # now that we've saved the data, mark it as such in the data.
             # we mark the data arrays and the inner setpoint array. Outer
             # setpoint arrays have different dimension (so would need a
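
Since the touched modules now log through module-level loggers (logging.getLogger(__name__)) rather than the root logger, the new debug messages can be enabled per module; a minimal sketch of one way to do that (the basicConfig handler setup is just an assumption for a plain-script session):

import logging

# A handler on the root logger so propagated records are actually printed.
logging.basicConfig(level=logging.WARNING,
                    format='%(name)s: %(levelname)s: %(message)s')

# Turn on the new debug output only for the modules touched by this patch.
for name in ('qcodes.data.data_set',
             'qcodes.data.gnuplot_format',
             'qcodes.loops'):
    logging.getLogger(name).setLevel(logging.DEBUG)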
diff --git a/_modules/qcodes/loops.html b/_modules/qcodes/loops.html
index 2ab639fbd1c..266be493724 100644
--- a/_modules/qcodes/loops.html
+++ b/_modules/qcodes/loops.html
@@ -228,9 +228,11 @@ 


 
 log = logging.getLogger(__name__)
 
+
 def active_loop():
     return ActiveLoop.active_loop
 
+
 def active_data_set():
     loop = active_loop()
     if loop is not None and loop.data_set is not None:
@@ -238,6 +240,7 @@ 


     else:
         return None
 
+
 
 [docs]class Loop(Metadatable):
     """
     The entry point for creating measurement loops
@@ -945,16 +948,16 @@


             return action
 
     def _run_wrapper(self, *args, **kwargs):
-        # try:
-        self._run_loop(*args, **kwargs)
-        # finally:
-        if hasattr(self, 'data_set'):
-            # TODO (giulioungaretti) WTF?
-            # somehow this does not show up in the data_set returned by
-            # run(), but it is saved to the metadata
-            ts = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
-            self.data_set.add_metadata({'loop': {'ts_end': ts}})
-            self.data_set.finalize()
+        try:
+            self._run_loop(*args, **kwargs)
+        finally:
+            if hasattr(self, 'data_set'):
+                # TODO (giulioungaretti) WTF?
+                # somehow this does not show up in the data_set returned by
+                # run(), but it is saved to the metadata
+                ts = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+                self.data_set.add_metadata({'loop': {'ts_end': ts}})
+                self.data_set.finalize()
 
     def _run_loop(self, first_delay=0, action_indices=(),
                   loop_indices=(), current_values=(),
@@ -1000,12 +1003,15 @@ 


             new_values = current_values + (value,)
             data_to_store = {}
 
-            if hasattr(self.sweep_values, "parameters"): # combined parameter
+            if hasattr(self.sweep_values, "parameters"):  # combined parameter
                 set_name = self.data_set.action_id_map[action_indices]
                 if hasattr(self.sweep_values, 'aggregate'):
                     value = self.sweep_values.aggregate(*set_val)
+                log.debug('Calling .store method of DataSet because '
+                          'sweep_values.parameters exist')
                 self.data_set.store(new_indices, {set_name: value})
-                for j, val in enumerate(set_val): # set_val list of values to set [param1_setpoint, param2_setpoint ..]
+                # set_val list of values to set [param1_setpoint, param2_setpoint ..]
+                for j, val in enumerate(set_val):
                     set_index = action_indices + (j+n_callables, )
                     set_name = (self.data_set.action_id_map[set_index])
                     data_to_store[set_name] = val
@@ -1013,6 +1019,8 @@ 


                 set_name = self.data_set.action_id_map[action_indices]
                 data_to_store[set_name] = value
 
+            log.debug('Calling .store method of DataSet because a sweep step'
+                      ' was taken')
             self.data_set.store(new_indices, data_to_store)
 
             if not self._nest_first:
@@ -1021,6 +1029,8 @@ 


 
             try:
                 for f in callables:
+                    log.debug('Going through callables at this sweep step.'
+                              ' Calling {}'.format(f))
                     f(first_delay=delay,
                       loop_indices=new_indices,
                       current_values=new_values)
@@ -1054,14 +1064,18 @@ 


 
         # run the background task one last time to catch the last setpoint(s)
         if self.bg_task is not None:
+            log.debug('Running the background task one last time.')
             self.bg_task()
 
         # the loop is finished - run the .then actions
+        log.debug('Finishing loop, running the .then actions...')
         for f in self._compile_actions(self.then_actions, ()):
+            log.debug('...running .then action {}'.format(f))
             f()
 
         # run the bg_final_task from the bg_task:
         if self.bg_final_task is not None:
+            log.debug('Running the bg_final_task')
             self.bg_final_task()
 
     def _wait(self, delay):
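
The try/finally introduced in _run_wrapper guarantees that the DataSet is finalised, and therefore fully written via write(only_complete=False), even when the loop body raises. A generic sketch of that guarantee, using stand-in callables rather than real qcodes objects:

def run_wrapped(run_loop, finalize):
    """Mimics the shape of _run_wrapper: finalize runs no matter what."""
    try:
        run_loop()   # may raise, e.g. a KeyboardInterrupt mid-sweep
    finally:
        finalize()   # remaining data is still flushed and files closed


def flaky_loop():
    raise RuntimeError('instrument dropped out mid-sweep')


try:
    run_wrapped(flaky_loop, lambda: print('finalize: writing remaining data'))
except RuntimeError:
    pass             # finalize() already ran before the exception surfaced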
diff --git a/api/generated/qcodes.DataSet.html b/api/generated/qcodes.DataSet.html
index 5bd171d299c..3336f7b4313 100644
--- a/api/generated/qcodes.DataSet.html
+++ b/api/generated/qcodes.DataSet.html
@@ -298,7 +298,7 @@ 

 qcodes.DataSet
 sync()	Synchronize this DataSet with the DataServer or storage.
-write([write_metadata])
+write([write_metadata, only_complete])
 	Writes updates to the DataSet to storage.
 write_copy([path, io_manager, location])

diff --git a/api/generated/qcodes.instrument_drivers.signal_hound.html b/api/generated/qcodes.instrument_drivers.signal_hound.html
index f55d1e3ea3c..f6d721537bd 100644
--- a/api/generated/qcodes.instrument_drivers.signal_hound.html
+++ b/api/generated/qcodes.instrument_drivers.signal_hound.html
@@ -318,12 +318,12 @@

Submodules
-saStatus = {'saUnknownErr': -666, 'saNoError': 0, 'saParameterClamped': 3, 'saCompressionWarning': 2, 'saDeviceNotOpenErr': -3, 'saFrequencyRangeErr': -99, 'saInvalidModeErr': -7, 'saInvalidScaleErr': -94, 'saInvalidDetectorErr': -95, 'saOvenColdErr': -20, 'saDeviceNotConfiguredErr': -6, 'saExternalReferenceNotFound': -89, 'saNotConfiguredErr': -6, 'saInternetErr': -12, 'saNullPtrErr': -1, 'saUSBCommErr': -11, 'saTrackingGeneratorNotFound': -10, 'saDeviceNotIdleErr': -9, 'saInvalidParameterErr': -4, 'saTooManyDevicesErr': -5, 'saDeviceNotFoundErr': -8, 'saBandwidthClamped': 4, 'saInvalidDeviceErr': -2, 'saBandwidthErr': -91, 'saNoCorrections': 1}
+saStatus = {'saInvalidParameterErr': -4, 'saNoError': 0, 'saNotConfiguredErr': -6, 'saInvalidDetectorErr': -95, 'saInvalidDeviceErr': -2, 'saNullPtrErr': -1, 'saInternetErr': -12, 'saParameterClamped': 3, 'saNoCorrections': 1, 'saDeviceNotOpenErr': -3, 'saDeviceNotIdleErr': -9, 'saDeviceNotConfiguredErr': -6, 'saBandwidthClamped': 4, 'saUnknownErr': -666, 'saInvalidModeErr': -7, 'saDeviceNotFoundErr': -8, 'saBandwidthErr': -91, 'saOvenColdErr': -20, 'saTrackingGeneratorNotFound': -10, 'saFrequencyRangeErr': -99, 'saExternalReferenceNotFound': -89, 'saInvalidScaleErr': -94, 'saUSBCommErr': -11, 'saCompressionWarning': 2, 'saTooManyDevicesErr': -5}
-saStatus_inverted = {0: 'saNoError', 1: 'saNoCorrections', 2: 'saCompressionWarning', 3: 'saParameterClamped', 4: 'saBandwidthClamped', -2: 'saInvalidDeviceErr', -99: 'saFrequencyRangeErr', -95: 'saInvalidDetectorErr', -94: 'saInvalidScaleErr', -91: 'saBandwidthErr', -666: 'saUnknownErr', -89: 'saExternalReferenceNotFound', -20: 'saOvenColdErr', -12: 'saInternetErr', -11: 'saUSBCommErr', -10: 'saTrackingGeneratorNotFound', -9: 'saDeviceNotIdleErr', -8: 'saDeviceNotFoundErr', -7: 'saInvalidModeErr', -6: 'saNotConfiguredErr', -5: 'saTooManyDevicesErr', -4: 'saInvalidParameterErr', -3: 'saDeviceNotOpenErr', -1: 'saNullPtrErr'}
+saStatus_inverted = {0: 'saNoError', 1: 'saNoCorrections', 2: 'saCompressionWarning', 3: 'saParameterClamped', 4: 'saBandwidthClamped', -99: 'saFrequencyRangeErr', -95: 'saInvalidDetectorErr', -94: 'saInvalidScaleErr', -91: 'saBandwidthErr', -666: 'saUnknownErr', -89: 'saExternalReferenceNotFound', -20: 'saOvenColdErr', -12: 'saInternetErr', -1: 'saNullPtrErr', -10: 'saTrackingGeneratorNotFound', -9: 'saDeviceNotIdleErr', -8: 'saDeviceNotFoundErr', -7: 'saInvalidModeErr', -6: 'saDeviceNotConfiguredErr', -5: 'saTooManyDevicesErr', -4: 'saInvalidParameterErr', -3: 'saDeviceNotOpenErr', -2: 'saInvalidDeviceErr', -11: 'saUSBCommErr'}
diff --git a/api/generated/qcodes.instrument_drivers.tektronix.html b/api/generated/qcodes.instrument_drivers.tektronix.html
index 0cdee37188c..a6bb3357d3e 100644
--- a/api/generated/qcodes.instrument_drivers.tektronix.html
+++ b/api/generated/qcodes.instrument_drivers.tektronix.html
@@ -269,12 +269,12 @@

Submodules
-AWG_FILE_FORMAT_CHANNEL = {'MARKER1_OFFSET_N': 'd', 'ANALOG_FILTER_N': 'h', 'OUTPUT_WAVEFORM_NAME_N': 's', 'MARKER1_SKEW_N': 'd', 'MARKER2_OFFSET_N': 'd', 'PHASE_N': 'd', 'DIGITAL_METHOD_N': 'h', 'DIGITAL_OFFSET_N': 'd', 'DIGITAL_HIGH_N': 'd', 'DIGITAL_AMPLITUDE_N': 'd', 'EXTERNAL_ADD_N': 'h', 'MARKER1_AMPLITUDE_N': 'd', 'PHASE_DELAY_INPUT_METHOD_N': 'h', 'MARKER2_AMPLITUDE_N': 'd', 'MARKER1_HIGH_N': 'd', 'DIGITAL_LOW_N': 'd', 'MARKER2_METHOD_N': 'h', 'ANALOG_AMPLITUDE_N': 'd', 'DC_OUTPUT_LEVEL_N': 'd', 'MARKER1_METHOD_N': 'h', 'ANALOG_HIGH_N': 'd', 'MARKER1_LOW_N': 'd', 'MARKER2_HIGH_N': 'd', 'DELAY_IN_POINTS_N': 'd', 'ANALOG_DIRECT_OUTPUT_N': 'h', 'CHANNEL_SKEW_N': 'd', 'MARKER2_SKEW_N': 'd', 'ANALOG_METHOD_N': 'h', 'CHANNEL_STATE_N': 'h', 'ANALOG_OFFSET_N': 'd', 'MARKER2_LOW_N': 'd', 'ANALOG_LOW_N': 'd', 'DELAY_IN_TIME_N': 'd'}
+AWG_FILE_FORMAT_CHANNEL = {'CHANNEL_SKEW_N': 'd', 'MARKER2_LOW_N': 'd', 'ANALOG_OFFSET_N': 'd', 'DIGITAL_LOW_N': 'd', 'MARKER2_AMPLITUDE_N': 'd', 'MARKER1_HIGH_N': 'd', 'MARKER1_OFFSET_N': 'd', 'DELAY_IN_TIME_N': 'd', 'DIGITAL_AMPLITUDE_N': 'd', 'DIGITAL_OFFSET_N': 'd', 'MARKER1_LOW_N': 'd', 'MARKER2_SKEW_N': 'd', 'PHASE_N': 'd', 'ANALOG_FILTER_N': 'h', 'OUTPUT_WAVEFORM_NAME_N': 's', 'ANALOG_HIGH_N': 'd', 'DELAY_IN_POINTS_N': 'd', 'MARKER1_METHOD_N': 'h', 'ANALOG_DIRECT_OUTPUT_N': 'h', 'ANALOG_LOW_N': 'd', 'EXTERNAL_ADD_N': 'h', 'MARKER2_HIGH_N': 'd', 'ANALOG_METHOD_N': 'h', 'CHANNEL_STATE_N': 'h', 'DC_OUTPUT_LEVEL_N': 'd', 'DIGITAL_HIGH_N': 'd', 'MARKER2_OFFSET_N': 'd', 'ANALOG_AMPLITUDE_N': 'd', 'DIGITAL_METHOD_N': 'h', 'MARKER2_METHOD_N': 'h', 'PHASE_DELAY_INPUT_METHOD_N': 'h', 'MARKER1_SKEW_N': 'd', 'MARKER1_AMPLITUDE_N': 'd'}

-AWG_FILE_FORMAT_HEAD = {'REFERENCE_CLOCK_FREQUENCY_SELECTION': 'h', 'TRIGGER_SOURCE': 'h', 'REFERENCE_SOURCE': 'h', 'HOLD_REPETITION_RATE': 'h', 'TRIGGER_INPUT_IMPEDANCE': 'h', 'DIVIDER_RATE': 'h', 'WAIT_VALUE': 'h', 'INTERLEAVE': 'h', 'TRIGGER_INPUT_POLARITY': 'h', 'RUN_MODE': 'h', 'TRIGGER_INPUT_SLOPE': 'h', 'RUN_STATE': 'h', 'INTERLEAVE_ADJ_AMPLITUDE': 'd', 'SAMPLING_RATE': 'd', 'EVENT_INPUT_POLARITY': 'h', 'ZEROING': 'h', 'INTERNAL_TRIGGER_RATE': 'd', 'REPETITION_RATE': 'd', 'JUMP_TIMING': 'h', 'EXTERNAL_REFERENCE_TYPE': 'h', 'EVENT_INPUT_THRESHOLD': 'd', 'TRIGGER_INPUT_THRESHOLD': 'd', 'CLOCK_SOURCE': 'h', 'EVENT_INPUT_IMPEDANCE': 'h', 'COUPLING': 'h', 'INTERLEAVE_ADJ_PHASE': 'd', 'REFERENCE_MULTIPLIER_RATE': 'h'}
+AWG_FILE_FORMAT_HEAD = {'ZEROING': 'h', 'INTERLEAVE_ADJ_PHASE': 'd', 'EXTERNAL_REFERENCE_TYPE': 'h', 'EVENT_INPUT_THRESHOLD': 'd', 'HOLD_REPETITION_RATE': 'h', 'EVENT_INPUT_POLARITY': 'h', 'WAIT_VALUE': 'h', 'REFERENCE_CLOCK_FREQUENCY_SELECTION': 'h', 'COUPLING': 'h', 'INTERLEAVE_ADJ_AMPLITUDE': 'd', 'REFERENCE_SOURCE': 'h', 'REPETITION_RATE': 'd', 'TRIGGER_INPUT_IMPEDANCE': 'h', 'DIVIDER_RATE': 'h', 'TRIGGER_INPUT_POLARITY': 'h', 'INTERLEAVE': 'h', 'INTERNAL_TRIGGER_RATE': 'd', 'RUN_MODE': 'h', 'TRIGGER_INPUT_THRESHOLD': 'd', 'SAMPLING_RATE': 'd', 'RUN_STATE': 'h', 'CLOCK_SOURCE': 'h', 'JUMP_TIMING': 'h', 'TRIGGER_SOURCE': 'h', 'REFERENCE_MULTIPLIER_RATE': 'h', 'EVENT_INPUT_IMPEDANCE': 'h', 'TRIGGER_INPUT_SLOPE': 'h'}