# Time-unit constants, in seconds.  SECS_IN_MIN (= 60) is defined earlier in
# the file, outside this extract.
# NOTE(review): stray line numbers from the extraction were removed.
SECS_IN_HOUR = (SECS_IN_MIN * 60)
SECS_IN_DAY = (SECS_IN_HOUR * 24)
SECS_IN_WEEK = (SECS_IN_DAY * 7)
SECS_IN_MONTH = (SECS_IN_DAY * 30)  # approximation: 30-day month
def renderDate(then_time, day=0, date=1):
    """Render a time_t as a short display date string.

    Returns "" for the sentinel values 0 and -1.  If day is true, or the
    timestamp falls exactly on local midnight, only the date is rendered:
    "%m/%d" when within the current year, "%m/%d/%Y" otherwise.  Otherwise
    "%m/%d/%Y" is returned when date is true, else the date with the time
    appended.

    NOTE(review): reconstructed from a garbled extract; the def line comes
    from the signature index at the end of the file, and the branch
    structure between the three "%m/%d/%Y" returns was inferred — confirm
    against the original source.
    """
    then_time = int(then_time)
    if then_time == 0 or then_time == -1:
        # NOTE(review): the "" return for the 0/-1 sentinels was inferred.
        return ""

    then_tuple = time.localtime(then_time)
    now_tuple = time.localtime(time.time())

    # Date-only rendering: explicitly requested, or timestamp is midnight.
    if day or (then_tuple[kHourPos] == 0 and then_tuple[kMinutePos] == 0
               and then_tuple[kSecondPos] == 0):
        if then_tuple[kYearPos] == now_tuple[kYearPos]:
            return time.strftime("%m/%d", then_tuple)
        else:
            return time.strftime("%m/%d/%Y", then_tuple)
    if date:
        return time.strftime("%m/%d/%Y", then_tuple)
    return time.strftime("%m/%d/%Y %I:%M%p", then_tuple)
86 def hdfExport(self, prefix, hdf_dataset, *extra, **extranamed):
87 skip_fields = extranamed.get(
"skip_fields",
None)
88 no_escape = extranamed.get(
"no_escape",
None)
89 translate_dict = extranamed.get(
"translate_dict",
None)
90 tz = extranamed.get(
"tz",
"US/Pacific")
92 for col_name,value
in self.items():
93 if skip_fields
and (col_name
in skip_fields):
96 name,col_type,col_options = self._table.getColumnDef(col_name)
98 col_type = odb.kVarString
101 if (value
is not None):
102 if col_options.get(
"no_export",0):
continue 103 if type(value)
in [ type(0), type(0L) ]:
104 hdf_dataset.setValue(prefix +
"." + col_name,
"%d" % value)
105 elif type(value) == type(1.0):
106 if int(value) == value:
107 hdf_dataset.setValue(prefix +
"." + col_name,
"%d" % value)
109 hdf_dataset.setValue(prefix +
"." + col_name,
"%0.2f" % value)
111 if col_type == odb.kReal:
112 log(
"why are we here with this value: %s" % value)
114 for k,v
in translate_dict.items():
115 value = string.replace(value,k,v)
116 if no_escape
and col_name
in no_escape:
117 hdf_dataset.setValue(prefix +
"." + col_name,str(value))
119 hdf_dataset.setValue(prefix +
"." + col_name,neo_cgi.htmlEscape(str(value)))
120 if col_options.get(
"int_date",0):
121 hdf_dataset.setValue(prefix +
"." + col_name +
".string",
renderDate(value))
122 hdf_dataset.setValue(prefix +
"." + col_name +
".day_string",
renderDate(value,day=1))
123 hdf_dataset.setValue(prefix +
"." + col_name +
".date_string",
renderDate(value,date=1))
126 neo_cgi.exportDate(hdf_dataset,
"%s.%s" % (prefix, col_name), tz, value)
130 if col_options.has_key(
"enum_values"):
131 enum = col_options[
"enum_values"]
132 hdf_dataset.setValue(prefix +
"." + col_name +
".enum",
133 str(enum.get(value,
'')))
def hdfExport(self, prefix, hdf_dataset, *extra, **extranamed):
    """Export every row of this list under prefix.N via Row.hdfExport.

    N is taken from the row's export_by column when the export_by keyword
    argument is given; otherwise rows are numbered sequentially.

    NOTE(review): reconstructed from a garbled extract — the loop over self
    and the sequential-numbering branch were inferred; confirm against the
    original source.
    """
    export_by = extranamed.get("export_by", None)
    n = 0
    for row in self:
        if export_by is not None:
            n = row[export_by]
        else:
            n = n + 1
        row.hdfExport("%s.%d" % (prefix, n), hdf_dataset,
                      *extra, **extranamed)
def setList(hdf, prefix, lst):
    """Export lst into hdf: prefix.0 holds the count, prefix.1..N the items.

    NOTE(review): the def line was lost in the extract and was reconstructed
    from the signature index at the end of the file.
    """
    hdf.setValue(prefix + ".0", str(len(lst)))
    for n in range(len(lst)):
        hdf.setValue(prefix + ".%d" % (n + 1), lst[n])
152 for n
in range(hdf.getIntValue(name,0)):
153 lst.append(hdf.getValue(name+
".%d" %(n+1),
""))
def eval_cs(hdf, a_cs_string):
    """Render the ClearSilver template string a_cs_string against hdf.

    On a template parse/render error, returns an HTML-escaped error message
    instead of raising.

    NOTE(review): reconstructed from a garbled extract — the CS object
    construction, the render() call, and the exact exception type caught
    were all inferred; confirm against the original source.
    """
    try:
        cs = neo_cs.CS(hdf)
        cs.parseStr(a_cs_string)
        return cs.render()
    except Exception:
        return "Error in CS tags: %s" % neo_cgi.htmlEscape(repr(a_cs_string))
def loopHDF(hdf, name=None):
    """Return the child nodes of hdf (or of hdf.getObj(name)) as a list.

    NOTE(review): only one line of this function survived the extract; the
    traversal via child()/next() is a best-effort reconstruction — confirm
    against the original source.
    """
    results = []
    if name: o = hdf.getObj(name)
    else: o = hdf
    if o:
        o = o.child()
    while o:
        results.append(o)
        o = o.next()
    return results
def loopKVHDF(hdf, name=None):
    """Return the child nodes of hdf (or hdf.getObj(name)) as (name, value) pairs.

    NOTE(review): only two lines of this function survived the extract; the
    traversal was reconstructed to mirror loopHDF — confirm against the
    original source.
    """
    results = []
    if name: o = hdf.getObj(name)
    else: o = hdf
    if o:
        o = o.child()
    while o:
        results.append((o.name(), o.value()))
        o = o.next()
    return results
# NOTE(review): garbled fragment of the HDF iterator helper classes — the
# class statements and def lines were lost in extraction, so a safe
# reconstruction is not possible from this view.  The surviving lines show
# an __init__ seeding self.node from self.hdf.child(), an advance step via
# self.node.next(), and three next()-style methods that raise StopIteration
# at the end of the sibling chain, yielding respectively (name, value)
# pairs, names only, and (name, node) pairs.  Left byte-identical pending
# recovery of the original source.
214 self.
node = self.hdf.child()
223 self.
node = self.node.next()
229 if not self.
node:
raise StopIteration
231 ret = (self.node.name(), self.node.value())
238 if not self.
node:
raise StopIteration
240 ret = self.node.name()
247 if not self.
node:
raise StopIteration
249 ret = (self.node.name(), self.
node)
def hdfExportDict(prefix, hdf, dict):
    """Export each key/value pair of dict as prefix.<key> = str(value).

    NOTE(review): the def line was lost in the extract and was reconstructed
    from the signature index at the end of the file.
    """
    for k, v in dict.items():
        hdf.setValue(prefix + "." + str(k), str(v))
def hdfExportList(prefix, hdf, list):
    """Export each item of list as prefix.N = str(item), N counting from 1.

    NOTE(review): only the setValue line survived the extract; the loop and
    the 1-based counter were inferred — confirm against the original source.
    """
    n = 0
    for item in list:
        n = n + 1
        hdf.setValue(prefix + "." + str(n), str(item))
def exportDate(hdf, prefix, tz, ttime):
    """Export the broken-down local time of ttime under prefix.*.

    Sets: hour (12-hour clock), am ("AM"/"PM"), 24hour, min, sec, mday,
    mon, year, 2yr (two-digit year), wday, and tzoffset.

    NOTE(review): reconstructed from a garbled extract.  The 12-hour/AM-PM
    computation was inferred, and `tz` is unused by the surviving code
    (time.localtime uses the process timezone and tzoffset is hard-coded
    to "0") — confirm against the original source.
    """
    lt = time.localtime(ttime)
    hour = lt.tm_hour % 12
    if hour == 0:
        hour = 12
    if lt.tm_hour < 12:
        am = "AM"
    else:
        am = "PM"
    hdf.setValue(prefix + ".hour", str(hour))
    # BUG FIX(review): the extract wrote this value to ".sec", where it was
    # immediately overwritten by the real seconds below; ".am" is the
    # intended node for the AM/PM flag.
    hdf.setValue(prefix + ".am", str(am))
    hdf.setValue(prefix + ".24hour", str(lt.tm_hour))
    hdf.setValue(prefix + ".min", "%02d" % lt.tm_min)
    hdf.setValue(prefix + ".sec", "%02d" % lt.tm_sec)
    hdf.setValue(prefix + ".mday", str(lt.tm_mday))
    hdf.setValue(prefix + ".mon", str(lt.tm_mon))
    hdf.setValue(prefix + ".year", str(lt.tm_year))
    hdf.setValue(prefix + ".2yr", str(lt.tm_year)[2:4])
    hdf.setValue(prefix + ".wday", str(lt.tm_wday))
    hdf.setValue(prefix + ".tzoffset", "0")
def exportGUID(prefix, hdf, guidStr):
    """Decompose guidStr and export its parts under prefix.*.

    Exports .time_t, a .time date breakdown (via exportDate), .random and
    .server_id.  Invalid GUIDs are silently skipped.

    NOTE(review): reconstructed from a garbled extract — the try: wrapper
    placement and the empty InvalidGUID handler were inferred; confirm
    against the original source.
    """
    try:
        t = guid.extract_time(guidStr)
        hdf.setValue(prefix + ".time_t", str(t))
        exportDate(hdf, prefix + ".time", "US/Pacific", t)
        hdf.setValue(prefix + ".random", str(guid.extract_random(guidStr)))
        hdf.setValue(prefix + ".server_id", str(guid.extract_ip(guidStr)))
    except guid.InvalidGUID:
        pass
def renderSize(prefix, hdf, size):
    """Export size (in bytes) as a human-readable string at prefix.str.

    NOTE(review): reconstructed from a garbled extract.  The unit suffixes
    ("GB"/"MB"/"KB") and the trailing-".0" strip appearing only in the KB
    branch (matching the surviving lines) are inferred — confirm against
    the original source.
    """
    if size > (1024 * 1024 * 1024):
        ret_size = "%.1f" % (size / (1024 * 1024 * 1024.))
        units = "GB"
        s = "%s%s" % (ret_size, units)
    elif size > (1024 * 1024):
        ret_size = "%.1f" % (size / (1024 * 1024.))
        units = "MB"
        s = "%s%s" % (ret_size, units)
    else:
        ret_size = "%.1f" % (size / (1024.))
        units = "KB"
        if ret_size.endswith(".0"):
            ret_size = ret_size[:-2]
        s = "%s%s" % (ret_size, units)
    hdf.setValue(prefix + ".str", s)
def test():
    """Smoke test: render a trivial CS template against a one-value HDF set.

    NOTE(review): reconstructed from a garbled extract — the HDF
    construction and the trailing call to test() were inferred; confirm
    against the original source.
    """
    hdf = neo_util.HDF()
    hdf.setValue("foo", "1")
    print(eval_cs(hdf, "this should say 1 ===> <?cs var:foo ?>"))

if __name__ == "__main__":
    test()
# -- Extraction artifact: a signature index of the functions defined in this
# -- file.  These lines are not executable (no colons, no bodies); commented
# -- out so they cannot be mistaken for code.
# def setList(hdf, prefix, lst)
# def exportDate(hdf, prefix, tz, ttime)
# def hdfExport(self, prefix, hdf_dataset, extra, extranamed)
# def hdfExport(self, prefix, hdf_dataset, extra, extranamed)
# def renderSize(prefix, hdf, size)
# def defaultRowListClass(self)
# def renderDate(then_time, day=0, date=1)
# def eval_cs(hdf, a_cs_string)
# def loopHDF(hdf, name=None)
# def hdfExportList(prefix, hdf, list)
# def hdfExportDict(prefix, hdf, dict)
# def loopKVHDF(hdf, name=None)
# def exportGUID(prefix, hdf, guidStr)
# def defaultRowClass(self)