"""
This file contains the base DataSource class, and all subclasses that implement their own methods for parsing data.
"""

from __future__ import print_function
def add_child(self, node_data):
    new_node = node(node_data)
    self._child_nodes.append(new_node)
42 return "<node data='%s' children=%s>" % (self.
data(),
str(self.
children()))
53 return "<data_source>" 61 _data, _sub_data, _file_name =
None,
None,
None 66 contents = "".
join(handle.readlines())
67 data = json.loads(contents)
72 return json_data_node.make(self.
_data)
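# Illustrative sketch (not part of this module): the same JSON-loading step as a
# standalone function, with explicit resource handling; the file name is whatever
# the caller passes in.
import json

def load_json_file(file_name):
    with open(file_name, "r") as handle:
        # reading the whole file and parsing it is equivalent to
        # "".join(handle.readlines()) followed by json.loads
        return json.loads(handle.read())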
# sqlite-file data source
_data, _sub_data, _file_name = None, None, None

cursor = connection.cursor()
if query_object == None:
    # no query given: dump every table in the sqlite file
    tables = cursor.execute("select name from sqlite_master where type = 'table'")

    for table in tables.fetchall():
        table_to_columns[table[0]] = []
        # pragma table_info gives one row per column; field 1 is the column name
        columns = cursor.execute("pragma table_info(%s)" % table[0])
        for column in columns.fetchall():
            table_to_columns[table[0]].append(str(column[1]))

    for table in table_to_columns:
        column_string = ",".join(table_to_columns[table])
        sql_query = "select %s from %s" % (column_string, table)
        results = cursor.execute(sql_query).fetchall()
        # turn each row tuple into a column -> value dictionary
        for n in range(0, len(results)):
            results[n] = dict(zip(table_to_columns[table], map(str, results[n])))
        table_to_data[str(table)] = results

    self._data = json_data_node.make(table_to_data)
else:
    sql_query = query_object.to_sql()
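# Illustrative sketch (not part of this module): the same table-dump technique
# using only the standard sqlite3 module, without the surrounding class.
import sqlite3

def dump_sqlite_tables(file_name):
    connection = sqlite3.connect(file_name)
    cursor = connection.cursor()
    table_to_data = {}
    tables = cursor.execute("select name from sqlite_master where type = 'table'").fetchall()
    for (table,) in tables:
        # pragma table_info returns one row per column; field 1 is the column name
        columns = [str(row[1]) for row in cursor.execute("pragma table_info(%s)" % table).fetchall()]
        rows = cursor.execute("select %s from %s" % (",".join(columns), table)).fetchall()
        table_to_data[table] = [dict(zip(columns, map(str, row))) for row in rows]
    connection.close()
    return table_to_data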
# json_data_node.make(data): pick the node type that matches the data
if isinstance(data, list):
    return json_list(data)
elif isinstance(data, dict):
    return json_dict(data)

# json_data_node.get(*args): walk into the nested data key by key
current_json_node = self
if len(args) == 1:
    data_to_use = current_json_node.data()[args[0]]
    return json_data_node.make(data_to_use)
for key in args:
    current_json_node = current_json_node.get(key)
return current_json_node
# json_data_node.find(type_name): recursively collect every value of the given type
lists = []
if isinstance(self._data, type_name):
    lists.append(self._data)
if isinstance(self._data, list):
    for item in self._data:
        lists += json_data_node.make(item).find(type_name)
elif isinstance(self._data, dict):
    for key in self._data:
        lists += json_data_node.make(self._data[key]).find(type_name)
return lists

def __repr__(self):
    return "<json_data_node data='%s'>" % str(self._data)
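# Illustrative sketch (not part of this module): the recursive search that
# find() performs, written over plain lists and dicts instead of node wrappers.
def find_instances(data, type_name):
    found = []
    if isinstance(data, type_name):
        found.append(data)
    if isinstance(data, list):
        for item in data:
            found += find_instances(item, type_name)
    elif isinstance(data, dict):
        for key in data:
            found += find_instances(data[key], type_name)
    return found

# find_instances({"a": [1, 2, {"b": 3}]}, int) returns [1, 2, 3]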
# json_list
iterator_index = None

self._data = data if data != None else []

# the last element of the list, wrapped in a node
data = self.get(len(self.data())-1)

# add_child(self, data): unwrap node types before appending
if data.__class__.__name__ in ["json_list", "json_dict", "json_basic"]:
    data = data.data()
self._data.append(data)
# indices(self, indices): select elements by position and return them as a new node
for index in indices:
    final_list.append(self.get(index).data())
return json_data_node.make(final_list)
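# Illustrative sketch (not part of this module): selecting elements by position,
# as indices() does, including negative indices counted from the end.
def select_indices(data, indices):
    return [data[index] for index in indices]

# select_indices(["a", "b", "c", "d"], [0, -1]) returns ["a", "d"]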
# get_members(self, member_name): take one attribute from every item in the list
if not(type(member_name) in [str, unicode]):
    raise TypeError("Value given for member name must be a string.")
type_of_first_item = self.data()[0].__class__
for item in self.data():
    if item.__class__ != type_of_first_item:
        # mixed item types are not allowed (handling elided in this excerpt)
        ...
return json_data_node.make(map(lambda item : getattr(item, member_name), self.data()))
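# Illustrative sketch (not part of this module): pulling one attribute from
# every item in a list, after checking that the items all share one type.
def members_of(items, member_name):
    if not isinstance(member_name, str):
        raise TypeError("Value given for member name must be a string.")
    first_type = items[0].__class__
    if any(item.__class__ != first_type for item in items):
        raise TypeError("All items in the list must be of the same type.")
    return [getattr(item, member_name) for item in items]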
# as_dicts(self, convert_timestamps=False): convert each item to a dictionary
# when the list holds condition model objects
if len(self.data()) == 0:
    print("\nNo data to convert to dictionaries.\n")

if self.get(0).data().__class__.__name__ in ["GlobalTag", "GlobalTagMap", "Tag", "IOV", "Payload"]:
    new_data = map(lambda item : item.as_dicts(convert_timestamps=convert_timestamps), [item for item in self.data()])
else:
    print("Data in json_list was not the correct type.")
def as_table(self, fit=["all"], columns=None, hide=None, col_width=None, row_nums=False):

    if len(self.data()) == 0:
        print("\nNo data to draw table with.\n")

    if self.get(0).data().__class__.__name__ in ["GlobalTag", "GlobalTagMap", "GlobalTagMapRequest", "Tag", "IOV", "Payload"]:
        from data_formats import _objects_to_dicts
        from querying import connection
        # take the headers defined for this model class
        headers = [header for header in models_dict[self.get(0).data().__class__.__name__.lower()].headers]
    else:
        # otherwise take the headers from the first dictionary's keys
        headers = data[0].keys()

    if row_nums:
        headers = ["row"] + headers
        # give each row its index as an extra column
        for i, item in enumerate(data):
            data[i]["row"] = str(i)
    if col_width == None:
        # fit the table to the terminal: take 95% of the width reported by stty
        table_width = int(0.95*int(subprocess.check_output(["stty", "size"]).split(" ")[1]))
        col_width = int(table_width/len(headers))
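    # Illustrative sketch (not part of this module): the same terminal-width
    # calculation using shutil.get_terminal_size (Python 3.3+) instead of stty.
    #
    #     import shutil
    #
    #     def default_column_width(number_of_columns):
    #         table_width = int(0.95 * shutil.get_terminal_size().columns)
    #         return max(1, table_width // number_of_columns)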
    # drop any columns the caller asked to hide
    if hide != None:
        for n in range(0, len(hide)):
            del headers[headers.index(hide[n])]
    # widest cell in this column, never narrower than the header itself
    def max_width_of_column(column, data):
        max_width_found = len(str(data[0][column]))
        for item in data:
            current_width = len(str(item[column]))
            if current_width > max_width_found:
                max_width_found = current_width
        if max_width_found > len(column):
            return max_width_found
        else:
            return len(column)
    # format one cell: fitted columns keep their full content, the rest are
    # truncated to the column width and marked with "..."
    def cell(content, header, col_width, fit):
        if fit:
            col_width_with_padding = col_width+2
            col_width_substring = len(str(content))
        else:
            col_width_with_padding = col_width-2 if col_width-2 > 0 else 1
            col_width_substring = col_width-5 if col_width-7 > 0 else 1
        return ("| {:<%s} " % (col_width_with_padding)).format(str(content)[0:col_width_substring].replace("\n", "")
            + ("..." if not(fit) and col_width_substring < len(str(content)) else ""))
    column_to_width = {}
    if fit != ["all"]:
        surplus_width = 0
        for column in fit:
            if not(column in headers):
                print("'%s' is not a valid column." % column)
            # fitted columns take their full natural width; track the surplus this costs
            column_to_width[column] = max_width_of_column(column, data)
            surplus_width += column_to_width[column]-col_width

        if len(set(headers)-set(fit)) != 0:
            # spread the surplus over the columns that are not fitted
            non_fited_width_surplus = surplus_width/len(set(headers)-set(fit))
        else:
            non_fited_width_surplus = 0

        for column in headers:
            if not(column in fit):
                column_to_width[column] = col_width - non_fited_width_surplus
    else:
        # fit == ["all"]: size every column to its widest content
        for column in headers:
            column_to_width[column] = max_width_of_column(column, data)
    ascii_string = "\n%s\n\n" % table_name if table_name != None else "\n"

    # header row
    for header in headers:
        ascii_string += cell(header, header, column_to_width[header], header in fit)

    horizontal_border = "\n"
    ascii_string += horizontal_border

    # one row of cells per data item
    for item in data:
        for n in range(0, len(headers)):
            entry = item[headers[n]]
            ascii_string += cell(entry, headers[n], column_to_width[headers[n]], headers[n] in fit)
        ascii_string += "\n"

    ascii_string += horizontal_border
    ascii_string += "Showing %d rows\n\n" % len(data)
# json_dict: dictionary-backed node
self._data = data if data != None else {}

# add_key(self, data, key): unwrap node types before storing under the given key
if data.__class__.__name__ in ["json_list", "json_dict", "json_basic"]:
    data = data.data()
self._data[key] = data

# json_basic: leaf node holding a plain value
self._data = data if data != None else ""