Repository for Petra's work at ampli, Jan-Feb 2019

downkwh.py

from util import getQuery, pickleQuery, getkwh
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

# Original hand-written query for January 2017 half-hourly kWh data (with a
# per-ICP data-quality filter over the whole of 2017), kept for reference.
# It has been superseded by the util.getkwh() call below.
# query = """
# SELECT comb.icp_id, comb.read_time, COALESCE(kwh_tot, 0) AS kwh_tot
# FROM
# (
#     SELECT read_time, icp_id
#     FROM
#     (
#         SELECT read_time
#         FROM GENERATE_SERIES('2017-01-01 00:30:00'::timestamp, '2017-02-01 00:00:00'::timestamp,
#                              '30 minutes'::interval) read_time
#     ) AS tsdata CROSS JOIN
#     (
#         SELECT *
#         FROM
#         (
#             SELECT icp_id, COUNT(DISTINCT read_date) AS data_days
#             FROM coup_prd.coupdatamaster
#             WHERE read_date >= to_date('01/01/2017','dd/mm/yyyy')
#               AND read_date < to_date('01/01/2018','dd/mm/yyyy')
#               AND content_code = 'UN'
#               AND icp_id LIKE '%%19'
#             GROUP BY icp_id
#         ) AS cir
#         WHERE data_days >= 360
#     ) AS qual_icp
# ) AS comb
# LEFT JOIN
# (
#     SELECT *, read_date + CONCAT(period / 2, ':', period %% 2 * 30, ':00')::time AS read_time
#     FROM public.coup_tall_jan
# ) AS tall_timestamp
# ON comb.read_time = tall_timestamp.read_time AND comb.icp_id = tall_timestamp.icp_id;
# """
#
# pickleQuery(query, "../data/jan19s.pkl")

# Download half-hourly kWh readings for 2018-01-01 to 2018-04-01 for ICPs whose
# id matches the LIKE pattern '%%1', then pickle the DataFrame for later use.
kwhdata = getkwh('2018-01-01', '2018-04-01', '2018-01-01 00:30:00', '2018-04-01 00:00:00', '%%1')
print(kwhdata.info())
print("Pickling")
kwhdata.to_pickle("../data/2018-proj-sample.pkl")
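The util module itself is not shown on this page, so the following is only a minimal sketch of what getkwh might look like, assuming it fills its five arguments into a template much like the commented-out query above and reads the result into a pandas DataFrame over a psycopg2 connection. The table names are taken from that query, the DSN is a placeholder, and the 360-day data-quality filter is simplified away; none of this is the actual util implementation.

# Sketch only: NOT the real util.getkwh. Assumes the helper parameterises the
# query template shown in downkwh.py and returns the result as a DataFrame.
import pandas as pd
import psycopg2  # assumption: util talks to Postgres via psycopg2


def getkwh(date_from, date_to, ts_from, ts_to, icp_like, dsn="dbname=coup"):
    """Return half-hourly kWh readings as (icp_id, read_time, kwh_tot) rows.

    date_from/date_to bound read_date, ts_from/ts_to bound the generated
    half-hour series, and icp_like is the LIKE pattern on icp_id (passed
    pre-escaped, e.g. '%%1', since literal percent signs must be doubled
    when psycopg2 parameters are used).
    """
    query = """
    SELECT comb.icp_id, comb.read_time, COALESCE(kwh_tot, 0) AS kwh_tot
    FROM (
        SELECT read_time, icp_id
        FROM (
            SELECT read_time
            FROM GENERATE_SERIES(%(ts_from)s::timestamp, %(ts_to)s::timestamp,
                                 '30 minutes'::interval) read_time
        ) AS tsdata CROSS JOIN (
            -- ICPs with any data in the window; the 360-day quality filter
            -- from the original January query is omitted in this sketch.
            SELECT DISTINCT icp_id
            FROM coup_prd.coupdatamaster
            WHERE read_date >= %(date_from)s::date
              AND read_date <  %(date_to)s::date
              AND content_code = 'UN'
              AND icp_id LIKE '{pattern}'
        ) AS qual_icp
    ) AS comb
    LEFT JOIN (
        SELECT *, read_date + CONCAT(period / 2, ':', period %% 2 * 30, ':00')::time AS read_time
        FROM public.coup_tall_jan
    ) AS tall_timestamp
    ON comb.read_time = tall_timestamp.read_time
       AND comb.icp_id = tall_timestamp.icp_id;
    """.format(pattern=icp_like)
    params = {"ts_from": ts_from, "ts_to": ts_to,
              "date_from": date_from, "date_to": date_to}
    with psycopg2.connect(dsn) as conn:
        return pd.read_sql_query(query, conn, params=params)

A downstream script would then simply reload the sample with pd.read_pickle("../data/2018-proj-sample.pkl") rather than hitting the database again.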