Repository for Petra's work at Ampli, Jan–Feb 2019.

downkwh.py (1.2 KB)
  1. from util import getQuery, pickleQuery
  2. import pandas as p
  3. import matplotlib.pyplot as plt
  4. import seaborn as sns
  5. query = """
  6. SELECT comb.icp_id, comb.read_time, COALESCE(kwh_tot, 0) AS kwh_tot
  7. FROM
  8. (
  9. SELECT read_time, icp_id
  10. FROM
  11. (
  12. SELECT read_time
  13. FROM GENERATE_SERIES('2017-01-01 00:30:00'::timestamp, '2017-02-01 00:00:00'::timestamp,
  14. '30 minutes'::interval) read_time
  15. ) AS tsdata CROSS JOIN
  16. (
  17. SELECT *
  18. FROM
  19. (
  20. SELECT icp_id, COUNT(DISTINCT read_date) AS data_days
  21. FROM coup_prd.coupdatamaster
  22. WHERE read_date >= to_date('01/01/2017','dd/mm/yyyy')
  23. AND read_date < to_date('01/01/2018','dd/mm/yyyy')
  24. AND content_code = 'UN'
  25. AND icp_id LIKE '%%19'
  26. GROUP BY icp_id
  27. ) AS cir
  28. WHERE data_days >= 360
  29. ) AS qual_icp
  30. ) AS comb
  31. LEFT JOIN
  32. (
  33. SELECT *, read_date + CONCAT(period / 2, ':', period %% 2 * 30, ':00')::time AS read_time
  34. FROM public.coup_tall_jan
  35. ) AS tall_timestamp
  36. ON comb.read_time = tall_timestamp.read_time AND comb.icp_id = tall_timestamp.icp_id;
  37. """
  38. pickleQuery(query, "../data/jan19s.pkl")