Repository for Petra's work at ampli Jan-Feb 2019

downkwh.py (1.1 KB)

from util import getQuery, pickleQuery
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns

# Build one row per (icp_id, half-hour) for April 2017, restricted to ICPs with
# near-complete data for the 2017 year; half-hours with no reading get kwh_tot = 0.
query = """
SELECT comb.icp_id, comb.read_time, COALESCE(kwh_tot, 0) AS kwh_tot
FROM
(
    -- Every half-hourly timestamp in April 2017 crossed with every qualifying ICP,
    -- so each ICP gets a complete time series even where readings are missing.
    SELECT read_time, icp_id
    FROM
    (
        SELECT read_time
        FROM GENERATE_SERIES('2017-04-01 00:30:00'::timestamp,
                             '2017-05-01 00:00:00'::timestamp,
                             '30 minutes'::interval) read_time
    ) AS tsdata CROSS JOIN
    (
        -- ICPs with at least 360 distinct days of 'UN' readings during 2017.
        SELECT *
        FROM
        (
            SELECT icp_id, COUNT(DISTINCT read_date) AS data_days
            FROM coup_prd.coupdatamaster
            WHERE read_date >= to_date('01/01/2017', 'dd/mm/yyyy')
              AND read_date < to_date('01/01/2018', 'dd/mm/yyyy')
              AND content_code = 'UN'
            GROUP BY icp_id
        ) AS cir
        WHERE data_days >= 360
    ) AS qual_icp
) AS comb
LEFT JOIN
(
    -- Turn the half-hour period index into a time of day, e.g. period 3 -> 01:30:00.
    -- %% is a literal % (modulo), doubled for the query driver's %-formatting.
    SELECT *, read_date + CONCAT(period / 2, ':', period %% 2 * 30, ':00')::time AS read_time
    FROM public.coup_tall_april
) AS tall_timestamp
ON comb.read_time = tall_timestamp.read_time AND comb.icp_id = tall_timestamp.icp_id;
"""

# Run the query and cache the result as a pickle for downstream analysis.
pickleQuery(query, "../data/April.pkl")
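
util is not shown in this listing, so the behaviour of getQuery and pickleQuery has to be inferred from how they are called. A minimal sketch, assuming a PostgreSQL database reached through SQLAlchemy and pandas; the connection string and library choice below are assumptions, not the repository's actual code:

# Hypothetical reconstruction of the helpers imported from util; the names match
# the imports above, but the implementation is an assumption.
import pandas as pd
from sqlalchemy import create_engine

# Placeholder connection string: the real credentials live elsewhere in the repo.
engine = create_engine("postgresql://user:password@localhost:5432/coup")

def getQuery(query):
    """Run a SQL query and return the result as a pandas DataFrame."""
    return pd.read_sql(query, engine)

def pickleQuery(query, path):
    """Run a SQL query and cache the resulting DataFrame as a pickle at path."""
    df = getQuery(query)
    df.to_pickle(path)
    return df

The doubled %% in the SQL above fits this kind of setup: with a psycopg2-backed connection, a literal % in textual SQL is usually written as %% so it survives the driver's parameter formatting.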