Merge pull request 'Fix emails + cleanup + minor improvements' (#1) from cleanup into master

Reviewed-on: #1
radex 2024-07-25 07:50:47 +00:00
commit 1d1b4b5541
35 changed files with 656 additions and 2113 deletions

.gitignore
View file

@@ -1,6 +1,5 @@
olddata
webapp/data.db
web/webapp/data.db
**/data.db
*pyc
*sublime*
kasownik.ini

View file

@@ -14,4 +14,4 @@ ADD web /usr/src/web
ADD fetch /usr/src/fetch
STOPSIGNAL SIGINT
CMD ["uwsgi", "--http-socket", "0.0.0.0:5000", "--plugins", "python3", "--wsgi", "webapp.wsgi:app", "--threads", "10", "--master"]
CMD ["uwsgi", "--http-socket", "0.0.0.0:5000", "--wsgi", "webapp.wsgi:app", "--threads", "10", "--master"]

View file

@@ -1,25 +1,12 @@
Kasownik
========
Warsaw Hackerspace Membership Management System.
# Kasownik
> „100 lines of Python!” - enki on the membership dues script
Warsaw Hackerspace Membership Management System
Summary
-------
## Summary
This project is divided into two separate modules:
* `web` - web frontend and basic logic, public-facing service
* `fetch` - bank account data fetcher, to be run in some secure domain
(at least separate UID) - supports "old" IdeaBank web interface
* `fetch` - bank account data fetcher
More info about these can be found in their respective `README.md` files.
Quick Start
-----------
1. [Register a new SSO application](https://sso.hackerspace.pl/client/create) - the client name and URI don't matter; the redirect URI should be `http://localhost:5000/oauth/callback` (by default); other settings can stay at their defaults
2. Set the `SPACEAUTH_CONSUMER_KEY` and `SPACEAUTH_CONSUMER_SECRET` env variables to the credentials of the client created above
3. `docker-compose run --rm kasownik-web ./manage.py syncdb` (one time)
4. Run the app: `docker-compose up --build`
5. (TODO: Add missing table for fetcher, add example data)

View file

@@ -1,23 +1,11 @@
Fetcher
=======
# kasownik-fetch
This is a separate process used for fetching of bank account data from IdeaBank.
This is a separate process used for fetching bank account data.
Raw transfer data is fetched into the `raw_transfers` table (using credentials
separate from the web interface worker) and forwarded into `transfers` by a
trigger and stored procedure (check it out in `triggers.sql`).
This process has its own `config.py` (example available in `config.py.dist`).
This process has its own `config.ini` (example available in `config.ini.dist`).
`raw_transfers` table schema is presented when executing:
python banking-ib.py --print-schema
gRPC proto files (`smsgw_pb2*.py`) can be rebuilt using:
python -m grpc_tools.protoc -Iprotos --python_out=. --grpc_python_out=. protos/smsgw.proto
TODO
----
* Cleanup logging
NOTE: See git history for old scraper (IdeaBank, BRE bank) implementations and SMSGW
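For orientation, here is a minimal read-back sketch of what the fetcher leaves behind. It assumes the `raw_transfer` table defined by the `RawTransfer` model in the scraper code (the prose above calls it `raw_transfers`) and a SQLAlchemy URI like the one configured under `[database]` in `config.ini`; it is not part of the fetcher itself:

```python
# Hypothetical inspection script - not shipped with the repo.
from sqlalchemy import create_engine, text

# Assumption: same URI as the [database] uri key in config.ini (a local SQLite file here).
engine = create_engine("sqlite:///data.db")

with engine.connect() as conn:
    rows = conn.execute(text(
        "SELECT date, type, amount, currency, title "
        "FROM raw_transfer ORDER BY date DESC LIMIT 10"
    ))
    for date_, type_, amount, currency, title in rows:
        # Amounts are stored as integers in 1/100 of the currency unit.
        print(f"{date_} {type_:12} {amount / 100:>10.2f} {currency} {title!r}")
```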

View file

@@ -1,668 +0,0 @@
#!/usr/bin/env/python2
# -*- coding: utf-8 -*-
# Copyright (c) 2017, Remigiusz Marcinkiewicz <remigiusz@marcinkiewicz.me>
# Based on iBRE/mBank CompanyNet crawler by Sergiusz Bazanski <q3k@q3k.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from datetime import date, datetime, timedelta
import sys
from time import sleep, time
import csv
import os
import random
import re
import logging
import logging.config
import argparse
import enum
import bs4
import requests
import grpc
from sqlalchemy import Column, Integer, String, Date, create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.schema import CreateTable, CreateIndex
from six.moves import input
import smsgw_pb2
import smsgw_pb2_grpc
from config import CurrentConfig
config = {
key: getattr(CurrentConfig, key)
for key in dir(CurrentConfig) if key.isupper()}
Base = declarative_base()
if config.get('LOGGING'):
logging.config.dictConfig(config['LOGGING'])
else:
logging.basicConfig(level=logging.DEBUG)
class RawTransfer(Base):
__tablename__ = 'raw_transfer'
id = Column(Integer, primary_key=True)
raw = Column(String(512))
uid = Column(String(128), index=True)
on_account = Column(String(32), index=True)
amount = Column(Integer)
currency = Column(String(8))
date = Column(Date)
type = Column(String(16))
index = Column(Integer)
title = Column(String(256))
balance = Column(Integer)
balance_currency = Column(String(8))
from_account = Column(String(32))
to_account = Column(String(32))
from_name = Column(String(256))
to_name = Column(String(256))
class IBParseError(Exception):
pass
class IBMaintenanceError(Exception):
pass
class IBRow(RawTransfer):
SECRET = config["SECRET"]
OWN_ACCOUNTS = config["OWN_ACCOUNTS"]
def __unicode__(self):
return u"{} *{} #{} @{} -\"{}\" -#{} => +\"{}\" +#{} [{}.{:02d} {}] ({}.{:02d} {}) ~\"{}\"".format(
self.type, self.index, self.on_account, self.date, self.from_name, self.from_account,
self.to_name, self.to_account, self.amount/100, self.amount%100,
self.currency, self.balance/100, self.balance%100, self.balance_currency, self.title)
def __str__(self):
return unicode(self).encode("utf-8")
def __repr__(self):
return str(self)
def __init__(self, row, on_account, raw):
self.raw = raw.decode('utf-8')
self.uid = row[IBField.uid]
self.index = 1
self.date = datetime.strptime(row[IBField.date_completed], "%Y%m%d").date()
self.title = row[IBField.title]
af = re.compile(r"([0-9]+)\.([0-9]{2})")
m = af.match(row[IBField.amount])
if m is None:
raise IBParseError("Can't parse amount value \"{}\"".format(row[IBField.amount]), row)
a, b = m.groups()
self.amount = int(a)*100+int(b)
self.currency = row[IBField.currency]
own_account = IBParser.parse_account_number(row[IBField.own_account])
own_name = "Stowarzyszenie \"Warszawski Hackerspace\""
if own_account not in self.OWN_ACCOUNTS:
raise IBParseError("own_account {} not in OWN_ACCOUNTS - format change?".format(own_account))
self.on_account = own_account
other_account = IBParser.parse_account_number(row[IBField.other_account])
if other_account is None:
raise IBParseError("other_account {} could not be parsed".format(row[IBField.other_account]))
other_name = row[IBField.other_name]
direction = row[IBField.direction]
if direction == "uznanie":
direction = "IN"
self.type = "IN"
elif direction == u"Obiciążenie": # sic!
direction = "OUT"
self.type = "OUT"
else:
raise IBParseError("Can't parse direction specifier \"{}\"", direction)
if own_account == other_account:
self.type = "BANK_FEE"
self.from_account = self.to_account = own_account
self.from_name = self.to_name = own_name
elif own_account in self.OWN_ACCOUNTS and other_account in self.OWN_ACCOUNTS:
self.from_name = self.to_name = own_name
if direction == "IN":
self.type = "IN_FROM_OWN"
self.from_account = other_account
self.to_account = own_account
elif direction == "OUT":
self.type = "OUT_TO_OWN"
self.from_account = own_account
self.to_account = other_account
else:
raise IBParseError("Can't figure out details of an own-to-own transfer")
elif direction == "IN":
self.type = "IN"
self.from_account = other_account
self.to_account = own_account
self.from_name = other_name
self.to_name = own_name
elif direction == "OUT":
self.type = "OUT"
self.from_account = own_account
self.to_account = other_account
self.from_name = own_name
self.to_name = other_name
else:
raise IBParseError("Can't figure out transfer type for current row", row)
if None in (self.type, self.to_account, self.from_account, self.to_name, self.from_name):
raise IBParseError(
"Something went wrong - one of the mandatory values empty",
self.type, self.to_account, self.from_account,
self.to_name, self.from_name)
class IBField(enum.Enum):
#Data waluty;Data zlecenia;Numer rachunku nadawcy;Numer banku nadawcy;Kwota w walucie rachunku;Waluta;Kurs;Kwota w walucie zlecenia;Numer rachunku odbiorcy;Odbiorca;Numer banku odbiorcy;Tytuł;Obciążenie/uznanie;Numer transakcji w systemie centralnym;
date_completed = u"Data waluty"
date_issued = u"Data zlecenia"
own_account = u"Numer rachunku nadawcy"
own_bank = u"Numer banku nadawcy"
amount = u"Kwota w walucie rachunku"
currency = u"Waluta"
rate = u"Kurs"
transfer_amount = "Kwota w walucie zlecenia"
other_account = u"Numer rachunku odbiorcy"
other_name = u"Odbiorca"
other_bank = u"Numer banku odbiorcy"
title = u"Tytuł"
direction = u"Obciążenie/uznanie"
uid = u"Numer transakcji w systemie centralnym"
class IBParser(object):
def __init__(self, account_number):
self.account_number = account_number
self.rows = []
self.fields = []
def parse(self, snapshot):
# Patch #01: successful utf8 test and CSV separator injection
kek = u"IMPLR - STARVING - SKŁADKA ;".encode("utf-8")
snapshot = snapshot.replace(kek, kek[:-1])
# Patch #02: newline in internal investment deposit transaction title
snapshot = snapshot.replace('\n-', ' -')
lines = snapshot.splitlines()
header = lines.pop(0).decode("utf-8").split(";")
if header[-1]:
if 'Przepraszamy strona chwilowo niedostępna' in snapshot:
raise IBMaintenanceError(snapshot)
raise IBParseError("Last column no longer empty? %r", header)
header = header[:-1]
for hf in header:
try:
self.fields.append(IBField(hf))
except ValueError as e:
raise IBParseError("Unexpected field name \"{}\"".format(hf), e)
c = csv.reader(reversed(lines), delimiter=";")
for row in c:
row = row[:-1]
if len(row) != len(self.fields):
raise IBParseError("Row has {} fields, {} expected after parsing the header: \"{}\"".format(len(row), len(self.fields), ';'.join(row)))
d = dict(zip(self.fields, [r.decode("utf-8") for r in row]))
r = IBRow(d, self.account_number, ";".join(row))
self.rows.append(r)
def get(self, type_=None, on_account=None):
return [
row for row in self.rows
if (row.type == type_ or type_ is None)
and (row.on_account == on_account or on_account is None)]
@staticmethod
def parse_account_number(s):
formats = [
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})", # 26 digits, optional country code - Poland
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([A-Z]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 22 characters including BIC bank code - Ireland
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([A-Z]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 18 characters including BIC bank code - Netherlands
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 22 digits - Germany
]
for f in formats:
m = re.search(f, s)
if m is not None:
break
if m is None:
return None
account = "".join(m.groups())
if len(m.group(1)) == 2:
account = "PL" + account
return account
class IBFetcher(object):
BASE = "https://secure.ideabank.pl/"
START_DATE = "01.11.2016"
def __init__(self):
self.logger = logging.getLogger(self.__class__.__name__)
self._soup = None
self.token = None
self.s = requests.Session()
self.s.headers.update({
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:50.0) Gecko/20100101 Firefox/50.0",
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate",
"DNT": "1",
"Upgrade-Insecure-Requests": "1",
"Pragma": "no-cache",
"Cache-Control": "no-cache"
})
def _makesoup(self, data):
self._soup = bs4.BeautifulSoup(data)
return self._soup
def _dump(self):
fn = config["DUMP_FILE"]
self.logger.warning("Dumping the last page to %f", fn)
open(fn, 'w').write(unicode(self._soup).encode('utf-8'))
def _getraw(self, page, params={}):
url = self.BASE + page
r = self.s.get(url, params=params,
timeout=config.get('DEFAULT_TIMEOUT', 3600))
self.logger.debug("GET %s?%s -> %d", page, "&".join([
str(k)+"="+str(v) for k, v in params.items()]), r.status_code)
if r.status_code != 200:
raise Exception("return code %i" % r.status_code)
return r
def _get(self, page):
r = self._getraw(page)
self.s.headers.update({"Referer": r.url})
soup = self._makesoup(r.text)
self._gettoken(soup)
self._hitjstoken(soup)
return soup
def _postraw(self, page, data):
url = self.BASE + page
h = self.s.headers.copy()
h.update({
"Content-Type": "application/x-www-form-urlencoded",
"X-Requested-With": "XMLHttpRequest",
})
r = self.s.post(url, data, timeout=config.get('DEFAULT_TIMEOUT', 3600))
self.logger.debug("POST %s -> %d", page, r.status_code)
if r.status_code != 200:
self._dump()
raise Exception("return code %i" % r.status_code)
return r
def _post(self, page, data):
mdata = {}
mdata["banking"] = self.token
mdata.update(data)
r = self._postraw(page, mdata)
if re.search("forbidden",r.text) is not None:
self._dump()
raise Exception("Received \"forbidden3\" response. Bad token?")
self.s.headers.update({"Referer": r.url})
soup = self._makesoup(r.text)
self._gettoken(soup)
self._hitjstoken(soup)
return soup
def _wait(self, seconds):
self.logger.debug("Waiting %d seconds", seconds)
sleep(seconds)
def _gettoken(self, soup):
i = soup.find("input", type="hidden", attrs={"name": "banking"})
m = re.search("changeBanking\(\'([0-9a-fA-F]+)\'\)", str(soup))
if i is not None and i["value"] is not None:
t = i["value"]
elif m is not None:
t = m.group(1)
else:
t = None
if t is not None:
self.token = t
self.logger.debug("Token: %s", self.token)
else:
self.logger.debug("No new token found")
def _hitjstoken(self, soup):
m = re.search("\/main\/index\/token\/([0-9]+)\/time\/", str(soup.head))
if m is not None:
t = m.group(1)
r = self._getraw("main/index/token/{}/time/{:.0f}.js".format(t, time()*1000), params={"t": "{:.16f}".format(random.random())})
self.logger.debug("Fetched JS timestamp token: %r", r.text)
def smsgw_request(self, filter_body='.*', timeout=30):
addr = config.get('SMSGW_ADDRESS', 'smsgw.hswaw-prod.svc.k0.hswaw.net:443')
with open(config['SMSGW_CERT'], 'rb') as fd:
api_cert = fd.read()
with open(config['SMSGW_KEY'], 'rb') as fd:
api_key = fd.read()
with open(config['SMSGW_CA'], 'rb') as fd:
api_ca = fd.read()
credentials = grpc.ssl_channel_credentials(
api_ca,
api_key,
api_cert,
)
channel = grpc.secure_channel(addr, credentials)
stub = smsgw_pb2_grpc.SMSGatewayStub(channel)
smsgw_request = smsgw_pb2.MessagesRequest(
filter_body=filter_body
)
return stub.Messages(smsgw_request, timeout=timeout)
def process_wallet_page(self, soup):
wallet = {"accounts": {}}
account_ids = []
for button in soup.find_all("button", class_="historia1"):
account_ids.append(re.search("\/accounts\/index\/([0-9]+)\/2", str(button["onclick"])).group(1))
accounts = []
for dt in soup.find_all("table", id="data"):
account = {}
cell = dt.find("td", class_="cell1")
if cell is None or cell.string is None:
continue
account["number"] = IBParser.parse_account_number((cell.string.strip()))
if account["number"] is None:
continue
cells = cell.find_next_siblings("td")
account["currency"] = cells[0].string.strip()
account["balance"] = cells[1].string.strip()
account["available_balance"] = cells[2].string.strip()
account["pln_balance"] = cells[3].string.strip()
accounts.append(account)
for account_id, account in zip(account_ids, accounts):
account["id"] = account_id
wallet["accounts"][account["number"]] = account
if len(wallet["accounts"]) == 0:
self.logger.error("Empty accounts list. Undetected failed login? Aborting.")
self._dump()
sys.exit(4)
return wallet
def login(self, username, password, interactive=False):
sms_re = r'Silne uwierzytelnienie do logowania. Kod SMS: (.*)'
smsgw_message = None
try:
smsgw_message = self.smsgw_request(sms_re)
except Exception as exc:
self.logger.warning('Couldn\'t create smsgw service, will go interactive', exc_info=exc)
login1_page = self._get("main/index")
self._wait(3)
data = {}
data["js"] = "true"
data["login"] = username
login2_page = self._post("main/index", data)
self._wait(3)
data = {}
password2_input = login2_page.find("input", attrs={"name": "password2"})
if password2_input is None:
self.logger.error("Masked password screen encountered - aborting")
sys.exit(4)
else:
self.logger.debug("Regular password screen encountered")
data["log2"] = username
data["password2"] = password2_input["value"]
data["password"] = password
twofa_page = self._post("main/index", data)
self._wait(3)
sms_input = twofa_page.find("input", attrs={"name": "sms"})
if sms_input is None:
self.logger.error('No SMS query - aborting')
sys.exit(4)
if smsgw_message:
msg = next(smsgw_message)
self.logger.debug('Got message: %r', msg)
code = re.findall(sms_re, msg.body)
data = {
"sms": code
}
else:
data = {
"sms": input('[?] OTP: ')
}
wallet_page = self._post("main/index", data)
if wallet_page.find("div", class_="login_form"):
self.logger.error("Login failed, aborting")
self._dump()
try:
self.logger.error("Possible reason: %r", ','.join(wallet_page.find("ul", class_="error_list").stripped_strings))
except:
pass # screw it, we're fucked anyway
sys.exit(4)
self._wait(2)
return self.process_wallet_page(wallet_page)
def fetch_account_history(self, account_id, start=None, end=None):
if end is None:
end = date.today()
if start is None:
start = date.today() - timedelta(days=30)
data = {
"code": account_id,
"report_type": "csv_dr",
"start_date": '{:02d}.{:02d}.{:04d}'.format(start.day, start.month, start.year),
"end_date": '{:02d}.{:02d}.{:04d}'.format(end.day, end.month, end.year),
"banking": self.token
}
r = self._postraw("accounts/getHistoryDailyReportsFile", data)
return r.content.decode("utf-8-sig").encode("utf-8")
def usage():
pass
def lock():
fn = config["LOCK_FILE"]
if os.path.isfile(fn):
logging.error("Lock file %s exists, aborting", fn)
sys.exit(3)
logging.debug("Setting up lock file %s", fn)
open(fn,'w').close()
if not os.path.isfile(fn):
logging.error("Lock file %s somehow does not exist, aborting", fn)
sys.exit(3)
def release():
fn = config["LOCK_FILE"]
logging.debug("Removing lock file %s", fn)
if not os.path.isfile(fn):
logging.error("Lock file %s somehow does not exist, WTF?", fn)
sys.exit(3)
os.remove(fn)
if os.path.isfile(fn):
logging.error("Lock file %s somehow still exists, WTF?", fn)
sys.exit(3)
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--no-action', action="store_true", help='do not commit any database changes')
parser.add_argument('-i', '--interactive', action="store_true", help='ask interactively for credentials')
parser.add_argument('-c', '--cached', action="store_true", help='use cached data (test)')
parser.add_argument('-l', '--load', action='append', help='process specified files (test)')
parser.add_argument('--print-schema', action="store_true", help='print table schema and quit')
if __name__ == "__main__":
args = parser.parse_args()
CACHE_DIR = config["CACHE_DIR"]
engine = create_engine(config["SQLALCHEMY_DATABASE_URI"])
session = sessionmaker(bind=engine)()
if args.print_schema:
logging.debug("Called with --print-schema, will print the create statement and quit.")
m = MetaData()
print('%s;' % CreateTable(IBRow.__table__).compile(engine))
for index in IBRow.__table__.indexes:
print('%s;' % CreateIndex(index).compile(engine))
sys.exit()
lock()
balances = {}
history_logs = {}
if args.load:
logging.debug("Using manually supplied files")
for fn in args.load:
an, f = fn.split(':')
account_number = IBParser.parse_account_number(an)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
continue
logging.debug('Loading "%s" as "%s"', f, account_number)
with open(f, 'r') as fd:
history_logs[account_number] = fd.read()
elif args.cached:
logging.debug("Loading cached files from {}".format(CACHE_DIR))
for f in os.listdir(CACHE_DIR):
if f.startswith('balance-'):
continue
account_number = IBParser.parse_account_number(f)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
continue
with open(CACHE_DIR + "/" + f,'r') as fd:
history_logs[account_number] = fd.read()
logging.debug("Loading \"{}\" as \"{}\"".format(f, account_number))
else:
logging.debug("Normal run - will connect to the bank")
fetcher = IBFetcher()
if "IB_LOGIN" not in config.keys() or "IB_PASSWORD" not in config.keys() or args.interactive:
wallet = fetcher.login(input("[?] ID: "), input("[?] Password: "), args.interactive)
else:
logging.debug("Using saved credentials")
wallet = fetcher.login(config["IB_LOGIN"], config["IB_PASSWORD"])
for account_number, account in wallet["accounts"].items():
logging.debug("Fetching history for account {} ({})".format(account_number, account["id"]))
history = fetcher.fetch_account_history(account["id"])
history_logs[account_number] = history
with open(CACHE_DIR+"/"+account_number,'w') as fd:
fd.write(history)
balances[account_number] = (account["available_balance"], account["currency"])
with open(CACHE_DIR+"/balance-"+account_number,'w') as fd:
fd.write("{} {}\n".format(
account["available_balance"],account["currency"]))
if not history_logs:
logging.error('Nothing to process')
sys.exit()
parsed = {}
stats = {}
for account_number, history in history_logs.items():
logging.debug("Parsing history for account {}".format(account_number))
parser = IBParser(account_number)
try:
parser.parse(history)
except IBMaintenanceError:
logging.exception('Maintenance error, skipping')
continue
stats[account_number] = {}
stats[account_number]["added"] = 0
stats[account_number]["skipped"] = 0
for row in parser.get():
if not session.query(IBRow).filter_by(uid=row.uid).first():
session.add(row)
stats[account_number]["added"] += 1
else:
stats[account_number]["skipped"] += 1
if args.no_action:
logging.info('Running with --no-action, not commiting.')
else:
session.commit()
# That is pretty ugly, but the only alternative would be to change handler
# level in runtime, and that'd still need some rollback anyway.
if any(v['added'] for v in stats.values()):
log_summary = logging.info
else:
log_summary = logging.debug
if balances:
log_summary("Account balances:")
for account_number,v in balances.items():
balance,currency = v
log_summary("\t{}: {} {}".format(account_number, balance, currency))
log_summary("Done: %r", stats)
release()

View file

@@ -1,500 +0,0 @@
#!/usr/bin/env/python2
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Piotr Dobrowolski <informatic@hackerspace.pl
# Based on IdeaBank crawler by Remigiusz Marcinkiewicz <remigiusz@marcinkiewicz.me>
# Based on iBRE/mBank CompanyNet crawler by Sergiusz Bazanski <q3k@q3k.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from datetime import date, datetime, timedelta
import sys
import time
import os
import re
import logging
import logging.config
import argparse
import json
import requests
from sqlalchemy import Column, Integer, String, Date, BigInteger, create_engine, MetaData
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
from sqlalchemy.schema import CreateTable, CreateIndex
from six.moves import input
from config import CurrentConfig
config = {
key: getattr(CurrentConfig, key)
for key in dir(CurrentConfig) if key.isupper()}
Base = declarative_base()
if config.get('LOGGING'):
logging.config.dictConfig(config['LOGGING'])
else:
logging.basicConfig(level=logging.DEBUG)
class RawTransfer(Base):
__tablename__ = 'raw_transfer'
id = Column(Integer, primary_key=True)
raw = Column(String)
uid = Column(String(128), index=True)
on_account = Column(String(32), index=True)
amount = Column(Integer)
currency = Column(String(8))
date = Column(Date)
type = Column(String(16))
index = Column(Integer)
title = Column(String(256))
balance = Column(Integer)
balance_currency = Column(String(8))
from_account = Column(String(32))
to_account = Column(String(32))
from_name = Column(String(256))
to_name = Column(String(256))
scrape_timestamp = Column(BigInteger, default=lambda: round(time.time() * 1000000))
class IBParseError(Exception):
pass
class IBMaintenanceError(Exception):
pass
class IBRow(RawTransfer):
SECRET = config["SECRET"]
OWN_ACCOUNTS = config["OWN_ACCOUNTS"]
def __unicode__(self):
if self.balance is not None:
return u"{} *{} #{} @{} -\"{}\" -#{} => +\"{}\" +#{} [{}.{:02d} {}] ({}.{:02d} {}) ~\"{}\"".format(
self.type, self.index, self.on_account, self.date, self.from_name, self.from_account,
self.to_name, self.to_account, self.amount/100, self.amount%100,
self.currency, self.balance/100, self.balance%100, self.balance_currency, self.title)
return u"{} *{} #{} @{} -\"{}\" -#{} => +\"{}\" +#{} [{}.{:02d} {}] (?) ~\"{}\"".format(
self.type, self.index, self.on_account, self.date, self.from_name, self.from_account,
self.to_name, self.to_account, self.amount/100, self.amount%100,
self.currency, self.title)
def __str__(self):
return unicode(self).encode("utf-8")
def __repr__(self):
return str(self)
def __init__(self, row, own_account):
self.date = datetime.strptime(row['date'], '%d.%m.%Y').date()
self.index = 1
self.raw = json.dumps(row, separators=(',', ':'))
self.uid = row['id']
self.title = row['title']
self.amount = int(round(row['amount'] * 100))
self.currency = row['currency']
self.on_account = own_account
self.from_name = row['remitterName']
self.from_account = IBParser.parse_account_number(row['remitterNrb'])
self.to_name = row['beneficiaryName']
self.to_account = IBParser.parse_account_number(row['beneficiaryNrb'])
direction = row['kind']
if direction == 'OUT' or direction == 'CARD_TRANS':
self.type = 'OUT'
self.amount = -self.amount
if row['operationType'] == 'SELF':
self.type = 'OUT_TO_OWN'
elif direction == 'IN':
self.type = 'IN'
if row['operationType'] == 'SELF':
self.type = 'IN_FROM_OWN'
elif direction == 'FEE':
self.type = 'BANK_FEE'
self.amount = -self.amount
if self.to_name != self.from_name and 'Prowizja za przelew natychmiastowy' not in self.title:
# TODO FIXME: false for instant transfer fees
raise IBParseError("Invalid to_name/from_name (%r / %r)" % (
self.to_name, self.from_name))
if self.from_account not in self.OWN_ACCOUNTS and self.to_account not in self.OWN_ACCOUNTS:
# if self.from_account not in self.OWN_ACCOUNTS or self.to_account not in self.OWN_ACCOUNTS:
raise IBParseError("Wrong to_account/from_account on bank fee transfer (%r / %r)" % (
self.to_account, self.from_account
))
# TODO FIXME: false for instant transfer fees
# To account seems to always be main account
# self.to_account = self.from_account
else:
raise IBParseError(
"Can't parse direction specifier \"{}\"".format(direction))
if None in (self.type, self.to_account, self.from_account, self.to_name, self.from_name):
print(row)
raise IBParseError(
"Something went wrong - one of the mandatory values empty",
self.type, self.to_account, self.from_account,
self.to_name, self.from_name)
class IBParser(object):
def __init__(self, account_number):
self.account_number = account_number
self.rows = []
self.fields = []
def parse(self, snapshot):
for tx in snapshot:
self.rows.append(IBRow(tx, self.account_number))
def get(self, type_=None, on_account=None):
return [
row for row in self.rows
if (row.type == type_ or type_ is None)
and (row.on_account == on_account or on_account is None)]
@staticmethod
def parse_account_number(s):
formats = [
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})", # 26 digits, optional country code - Poland
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([A-Z]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 22 characters including BIC bank code - Ireland
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([A-Z]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 18 characters including BIC bank code - Netherlands
"((?:[A-Za-z]{2})?[0-9]{14})", # 14 characters including BIC bank code - Belgium
"((?:[A-Za-z]{2})?[0-9]{2})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{4})[ ]?([0-9]{2})", # 22 digits - Germany
"^([0-9]{5})$", # 5 digits - weird special hax for Benevity (UK?)
]
for f in formats:
m = re.search(f, s)
if m is not None:
break
if m is None:
return None
account = "".join(m.groups())
if len(m.group(1)) == 2:
account = "PL" + account
return account
class IBFetcher(object):
BASE = "https://secure.ideabank.pl/"
START_DATE = "01.11.2016"
def __init__(self, config):
self.logger = logging.getLogger(self.__class__.__name__)
self.token = None
self.config = config
self.s = requests.Session()
self.s.headers.update({
"User-Agent": config.get("IB_UA", 'Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0'),
"Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
"Accept-Language": "en-US,en;q=0.5",
"Accept-Encoding": "gzip, deflate",
"DNT": "1",
"Upgrade-Insecure-Requests": "1",
"Pragma": "no-cache",
"Cache-Control": "no-cache"
})
self.s.cookies.update({
'ib_trusted_device': config.get('IB_TRUSTED_DEVICE_TOKEN', ''),
})
def _wait(self, seconds):
self.logger.debug("Waiting %d seconds", seconds)
time.sleep(seconds)
def _request(self, url, method='GET', *args, **kwargs):
if not url.startswith('http'):
url = 'https://cloud.ideabank.pl/api' + url
if 'params' not in kwargs:
kwargs['params'] = {}
kwargs['params'].update({
'v': round(time.time() * 1000),
})
if self.token:
if 'headers' not in kwargs:
kwargs['headers'] = {}
kwargs['headers'].update({
'Authentication-Token': self.token,
})
resp = self.s.request(method, url, *args, **kwargs)
resp.raise_for_status()
return resp
def _get(self, url, *args, **kwargs):
return self._request(url, 'GET', *args, **kwargs)
def _post(self, url, *args, **kwargs):
return self._request(url, 'POST', *args, **kwargs)
def login(self, login, password):
self._get('https://sso.cloud.ideabank.pl/authenticate/login', params={
'login': login,
})
login_resp = self._post('https://sso.cloud.ideabank.pl/authenticate/login', json={
'login': login,
'password': password,
}).json()
login_token = login_resp['token']
apilogin_resp = self._post('/login', data={
'token': login_token,
})
auth_token = apilogin_resp.history[0].cookies['Authentication-Token']
self.token = auth_token
self.logger.debug('Authentication token: %s', self.token)
return auth_token
def get_wallet(self):
accounts = self._get('/accounts').json()
wallet = {
'accounts': {},
}
for g in accounts['firmAccountGroups']:
for a in g['accounts']:
account_number = IBParser.parse_account_number(a['nrb']['value'])
wallet['accounts'][account_number] = {
'id': a['productId'],
'available_balance': a['balance'], # FIXME activeBalance?
'currency': a['currencyCode'],
}
return wallet
def fetch_account_history(self, account_id, start=None, end=None):
epoch_start = date(2019, 10, 20)
if end is None:
end = date.today()
if start is None:
start = date.today() - timedelta(days=30)
if start < epoch_start:
start = epoch_start
total_pages = 1
page = 0
history = []
while page < total_pages:
transactions = self._get('/transactions', params={
'from': '{:04d}-{:02d}-{:02d}'.format(start.year, start.month, start.day),
'to': '{:04d}-{:02d}-{:02d}'.format(end.year, end.month, end.day),
'page': str(page),
'productIds': account_id,
}).json()
history.extend(transactions['history'])
page += 1
total_pages = transactions['page']['totalPages']
return history
def usage():
pass
def lock():
fn = config["LOCK_FILE"]
if os.path.isfile(fn):
logging.error("Lock file %s exists, aborting", fn)
sys.exit(3)
logging.debug("Setting up lock file %s", fn)
open(fn,'w').close()
if not os.path.isfile(fn):
logging.error("Lock file %s somehow does not exist, aborting", fn)
sys.exit(3)
def release():
fn = config["LOCK_FILE"]
logging.debug("Removing lock file %s", fn)
if not os.path.isfile(fn):
logging.error("Lock file %s somehow does not exist, WTF?", fn)
sys.exit(3)
os.remove(fn)
if os.path.isfile(fn):
logging.error("Lock file %s somehow still exists, WTF?", fn)
sys.exit(3)
parser = argparse.ArgumentParser()
parser.add_argument('-n', '--no-action', action="store_true", help='do not commit any database changes')
parser.add_argument('-c', '--cached', action="store_true", help='use cached data (test)')
parser.add_argument('-l', '--load', action='append', help='process specified files (test)')
parser.add_argument('-t', '--token', help='use authentication token')
parser.add_argument('--start', type=lambda s: datetime.strptime(s, '%Y-%m-%d').date(), help='start date (YYYY-MM-DD)')
parser.add_argument('--end', type=lambda s: datetime.strptime(s, '%Y-%m-%d').date(), help='end date (YYYY-MM-DD)')
parser.add_argument('--no-lock', action='store_true', help='don\'t use lockfile (test)')
parser.add_argument('--print-schema', action="store_true", help='print table schema and quit')
if __name__ == "__main__":
args = parser.parse_args()
CACHE_DIR = config["CACHE_DIR"]
engine = create_engine(config["SQLALCHEMY_DATABASE_URI"])
session = sessionmaker(bind=engine)()
if args.print_schema:
logging.debug("Called with --print-schema, will print the create " +
"statement and quit.")
m = MetaData()
print('%s;' % CreateTable(IBRow.__table__).compile(engine))
for index in IBRow.__table__.indexes:
print('%s;' % CreateIndex(index).compile(engine))
sys.exit()
if not args.no_lock:
lock()
balances = {}
history_logs = {}
if args.load:
logging.debug("Using manually supplied files")
for fn in args.load:
an, f = fn.split(':')
account_number = IBParser.parse_account_number(an)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
continue
logging.debug('Loading "%s" as "%s"', f, account_number)
with open(f, 'r') as fd:
history_logs[account_number] = json.loads(fd.read())
elif args.cached:
logging.debug("Loading cached files from {}".format(CACHE_DIR))
for f in os.listdir(CACHE_DIR):
if f.startswith('balance-'):
continue
account_number = IBParser.parse_account_number(f)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
continue
with open(CACHE_DIR + "/" + f, 'r') as fd:
try:
history_logs[account_number] = json.loads(fd.read())
except Exception as e:
logging.error("Failed to decode {}: {}".format(f, e))
logging.debug("Loading \"{}\" as \"{}\"".format(f, account_number))
else:
logging.debug("Normal run - will connect to the bank")
fetcher = IBFetcher(config)
if args.token:
fetcher.token = args.token
logging.debug("Using provided token")
elif "IB_LOGIN" not in config.keys() or "IB_PASSWORD" not in config.keys():
fetcher.login(input("[?] ID: "), input("[?] Password: "))
else:
logging.debug("Using saved credentials")
fetcher.login(config["IB_LOGIN"], config["IB_PASSWORD"])
wallet = fetcher.get_wallet()
for account_number, account in wallet["accounts"].items():
logging.debug("Fetching history for account {} ({})".format(
account_number, account["id"]))
history = fetcher.fetch_account_history(account["id"], start=args.start, end=args.end)
history_logs[account_number] = history
with open(CACHE_DIR + "/" + account_number, 'w') as fd:
fd.write(json.dumps(history))
balances[account_number] = (
account["available_balance"], account["currency"])
with open(CACHE_DIR + "/balance-"+account_number, 'w') as fd:
fd.write("{} {}\n".format(
account["available_balance"], account["currency"]))
if not history_logs:
logging.error('Nothing to process')
sys.exit()
parsed = {}
stats = {}
for account_number, history in history_logs.items():
logging.debug("Parsing history for account {}".format(account_number))
parser = IBParser(account_number)
try:
parser.parse(history)
except IBMaintenanceError:
logging.exception('Maintenance error, skipping')
continue
stats[account_number] = {}
stats[account_number]["added"] = 0
stats[account_number]["skipped"] = 0
for row in parser.get():
if not session.query(IBRow).filter_by(uid=row.uid).first():
if args.no_action:
print(row)
session.add(row)
stats[account_number]["added"] += 1
else:
stats[account_number]["skipped"] += 1
if args.no_action:
logging.info('Running with --no-action, not commiting.')
else:
session.commit()
# That is pretty ugly, but the only alternative would be to change handler
# level in runtime, and that'd still need some rollback anyway.
if any(v['added'] for v in stats.values()):
log_summary = logging.info
else:
log_summary = logging.debug
if balances:
log_summary("Account balances:")
for account_number, v in balances.items():
balance, currency = v
log_summary("\t{}: {} {}".format(
account_number, balance, currency))
log_summary("Done: %r", stats)
try:
if stats.get('PL91195000012006000648890004',{}).get('added'):
msg = 'holla holla get dolla: {1[0]} {1[1]} (+{0})'.format(stats.get('PL91195000012006000648890004',{}).get('added'), balances.get('PL91195000012006000648890004'))
requests.post('http://hackerspace.pl:43288/moonspeak/1/notification', params={
'target': '#hackerspace-pl-members', 'message': msg,
})
except Exception as exc:
print(exc)
if not args.no_lock:
release()

View file

@@ -153,7 +153,11 @@ class CAMT052Parser:
transfer.type = "IN"
transfer.index = 1
transfer.uid = txdtls.find("ns:Refs", ns).find("ns:InstrId", ns).text + '.' + transfer.type
transfer.uid = (
txdtls.find("ns:Refs", ns).find("ns:InstrId", ns).text
+ "."
+ transfer.type
)
transfer.on_account = on_account
transfer.raw = ET.tostring(entry).decode()
transfer.amount = int(Decimal(amt.text) * 100)
@@ -169,6 +173,7 @@ class CAMT052Parser:
class PekaoClient:
resp = None
def __init__(self, config):
self.config = config
self.logger = logging.getLogger(self.__class__.__name__)
@@ -219,7 +224,9 @@ class PekaoClient:
"MaskLoginForm",
{
"p_passmasked_bis": mask_password(
password, login_mask, alias,
password,
login_mask,
alias,
)
},
)
@@ -278,7 +285,9 @@ class PekaoClient:
date_from = datetime.datetime.now() - datetime.timedelta(days=60)
if date_from < pekao_epoch:
self.logger.warning("Rolling back from %r to %r (pekao epoch", date_from, pekao_epoch)
self.logger.warning(
"Rolling back from %r to %r (pekao epoch", date_from, pekao_epoch
)
date_from = pekao_epoch
if date_to is None:
@@ -331,13 +340,13 @@ class PekaoClient:
def _go(self, url, method="GET", **args):
self.logger.debug("=> %s %s", method, url)
if self.resp and self.resp.url:
self.session.headers['Referer'] = self.resp.url
self.session.headers["Referer"] = self.resp.url
self.resp = self.session.request(method, url, timeout=15, **args)
self.logger.debug(" -> [%d] %s", self.resp.status_code, self.resp.url)
self.resp.raise_for_status()
self.bs = BeautifulSoup(self.resp.text, features='html.parser')
self.bs = BeautifulSoup(self.resp.text, features="html.parser")
def _submit_form(self, name, values):
form = self.bs.find("form", {"name": name})
@@ -355,11 +364,12 @@ def lock(fn):
logging.error("Lock file %s exists, aborting", fn)
sys.exit(3)
logging.debug("Setting up lock file %s", fn)
open(fn,'w').close()
open(fn, "w").close()
if not os.path.isfile(fn):
logging.error("Lock file %s somehow does not exist, aborting", fn)
sys.exit(3)
def release(fn):
logging.debug("Removing lock file %s", fn)
if not os.path.isfile(fn):
@@ -370,48 +380,67 @@ def release(fn):
logging.error("Lock file %s somehow still exists, WTF?", fn)
sys.exit(3)
parser = argparse.ArgumentParser()
parser.add_argument('--config', help="Load configuration file")
parser.add_argument('-n', '--no-action', action="store_true", help='do not commit any database changes')
parser.add_argument('-c', '--cached', action="store_true", help='use cached data (test)')
parser.add_argument('-l', '--load', action='append', help='process specified files (test)')
parser.add_argument('-t', '--token', help='use authentication token')
parser.add_argument('--no-lock', action='store_true', help='don\'t use lockfile (test)')
parser.add_argument('--print-schema', action="store_true", help='print table schema and quit')
parser.add_argument("--config", help="Load configuration file")
parser.add_argument(
"-n", "--no-action", action="store_true", help="do not commit any database changes"
)
parser.add_argument(
"-c", "--cached", action="store_true", help="use cached data (test)"
)
parser.add_argument(
"-l", "--load", action="append", help="process specified files (test)"
)
parser.add_argument("-t", "--token", help="use authentication token")
parser.add_argument("--no-lock", action="store_true", help="don't use lockfile (test)")
parser.add_argument(
"--print-schema", action="store_true", help="print table schema and quit"
)
if __name__ == "__main__":
args = parser.parse_args()
config = configparser.ConfigParser(defaults=os.environ, interpolation=configparser.ExtendedInterpolation())
config.read_dict({
'logging': {
'level': 'INFO',
},
'general': {
'cache_dir': 'cache',
'lockfile': 'lockfile',
},
})
config = configparser.ConfigParser(
defaults=os.environ, interpolation=configparser.ExtendedInterpolation()
)
config.read_dict(
{
"logging": {
"level": "INFO",
},
"general": {
"cache_dir": "cache",
"lockfile": "lockfile",
},
}
)
if args.config:
config.read(args.config)
logging.basicConfig(level=config['logging']['level'], format=config['logging'].get('format', '%(asctime)s [%(levelname)s] %(name)s: %(message)s'))
logging.getLogger('chardet').setLevel(logging.WARN)
logging.getLogger('charset_normalizer').setLevel(logging.WARN)
logging.basicConfig(
level=config["logging"]["level"],
format=config["logging"].get(
"format", "%(asctime)s [%(levelname)s] %(name)s: %(message)s"
),
)
logging.getLogger("chardet").setLevel(logging.WARN)
logging.getLogger("charset_normalizer").setLevel(logging.WARN)
CACHE_DIR = config['general']['cache_dir']
engine = create_engine(config['database']['uri'])
CACHE_DIR = config["general"]["cache_dir"]
engine = create_engine(config["database"]["uri"])
session = sessionmaker(bind=engine)()
if args.print_schema:
logging.debug("Called with --print-schema, will print the create " +
"statement and quit.")
logging.debug(
"Called with --print-schema, will print the create " + "statement and quit."
)
print(get_schema(engine))
sys.exit()
if not args.no_lock:
lock(config['general']['lockfile'])
lock(config["general"]["lockfile"])
balances = {}
history_logs = {}
@@ -419,62 +448,68 @@ if __name__ == "__main__":
if args.load:
logging.debug("Using manually supplied files")
for fn in args.load:
an, f = fn.split(':')
an, f = fn.split(":")
account_number = IBParser.parse_account_number(an)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
logging.error('File name number "{}" unparseable'.format(f))
continue
logging.debug('Loading "%s" as "%s"', f, account_number)
with open(f, 'r') as fd:
with open(f, "r") as fd:
history_logs[account_number] = json.loads(fd.read())
elif args.cached:
logging.debug("Loading cached files from {}".format(CACHE_DIR))
for f in os.listdir(CACHE_DIR):
if f.startswith('balance-'):
if f.startswith("balance-"):
continue
account_number = CAMT052Parser.parse_account_number(f)
if account_number is None:
logging.error("File name number \"{}\" unparseable".format(f))
logging.error('File name number "{}" unparseable'.format(f))
continue
with open(CACHE_DIR + "/" + f, 'r') as fd:
with open(CACHE_DIR + "/" + f, "r") as fd:
history_logs[account_number] = fd.read()
logging.debug("Loading \"{}\" as \"{}\"".format(f, account_number))
logging.debug('Loading "{}" as "{}"'.format(f, account_number))
else:
logging.debug("Normal run - will connect to the bank")
fetcher = PekaoClient(config['scraper'])
fetcher = PekaoClient(config["scraper"])
if args.token:
fetcher.token = args.token
logging.debug("Using provided token")
elif "alias" not in config['scraper'] or "password" not in config['scraper']:
elif "alias" not in config["scraper"] or "password" not in config["scraper"]:
fetcher.login(input("[?] ID: "), input("[?] Password: "))
else:
logging.debug("Using saved credentials")
fetcher.login(config["scraper"]['alias'], config["scraper"]['password'])
fetcher.login(config["scraper"]["alias"], config["scraper"]["password"])
accounts = fetcher.list_accounts()
for account_id, account in accounts.items():
account_number = CAMT052Parser.parse_account_number(account['p_acc_no'])
logging.debug("Fetching history for account {} ({}) {}".format(
account_number, account_id, account["p_acc_alias"],
))
account_number = CAMT052Parser.parse_account_number(account["p_acc_no"])
logging.debug(
"Fetching history for account {} ({}) {}".format(
account_number,
account_id,
account["p_acc_alias"],
)
)
history = fetcher.fetch_transfers_camt052(account_id)
history_logs[account_number] = history
with open(CACHE_DIR + "/" + account_number, 'w') as fd:
fd.write('' if history is None else history)
with open(CACHE_DIR + "/" + account_number, "w") as fd:
fd.write("" if history is None else history)
balances[account_number] = (
account["p_acc_avail_balance"], account["p_acc_currency"])
with open(CACHE_DIR + "/balance-"+account_number, 'w') as fd:
account["p_acc_avail_balance"],
account["p_acc_currency"],
)
with open(CACHE_DIR + "/balance-" + account_number, "w") as fd:
fd.write("{} {}\n".format(*balances[account_number]))
if not history_logs:
logging.error('Nothing to process')
logging.error("Nothing to process")
sys.exit()
parsed = {}
@@ -482,7 +517,7 @@ if __name__ == "__main__":
for account_number, history in history_logs.items():
logging.debug("Parsing history for account {}".format(account_number))
if not history:
logging.debug('No transfers for that account, continuing...')
logging.debug("No transfers for that account, continuing...")
continue
parser = CAMT052Parser(history, own_accounts=list(history_logs.keys()))
@@ -499,13 +534,13 @@ if __name__ == "__main__":
stats[account_number]["skipped"] += 1
if args.no_action:
logging.info('Running with --no-action, not commiting.')
logging.info("Running with --no-action, not commiting.")
else:
session.commit()
# That is pretty ugly, but the only alternative would be to change handler
# level in runtime, and that'd still need some rollback anyway.
if any(v['added'] for v in stats.values()):
if any(v["added"] for v in stats.values()):
log_summary = logging.info
else:
log_summary = logging.debug
@@ -514,10 +549,9 @@ if __name__ == "__main__":
log_summary("Account balances:")
for account_number, v in balances.items():
balance, currency = v
log_summary("\t{}: {} {}".format(
account_number, balance, currency))
log_summary("\t{}: {} {}".format(account_number, balance, currency))
log_summary("Done: %r", stats)
if not args.no_lock:
release(config['general']['lockfile'])
release(config["general"]["lockfile"])

View file

@@ -1,9 +1,11 @@
; NOTE: you can use ${ENV_NAME} as values
[general]
cache_dir=cache/
lockfile=lockfile
cache_dir=/path/to/cache/folder
lockfile=/path/to/kasownik.lock
[database]
uri=sqlite:///./pekaobiznes.sqlite3
uri=sqlite:///../data.db
[scraper]
tdid=...trusted_device_id...
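A note on the `${ENV_NAME}` hint above: the scraper builds its `ConfigParser` with `defaults=os.environ` and `ExtendedInterpolation` (visible in the diff above), so any environment variable can be spliced into a value. A small stand-alone sketch of how that resolves, using a made-up `DB_PASSWORD` variable and URI:

```python
# Sketch of the ${ENV_NAME} substitution this config file relies on.
import configparser
import os

os.environ["DB_PASSWORD"] = "hunter2"  # example value; normally set by the deployment

config = configparser.ConfigParser(
    defaults=os.environ, interpolation=configparser.ExtendedInterpolation()
)
config.read_string(
    "[database]\n"
    "uri=postgresql://kasownik:${DB_PASSWORD}@localhost/kasownik\n"
)
print(config["database"]["uri"])
# -> postgresql://kasownik:hunter2@localhost/kasownik
```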

View file

@@ -1,50 +0,0 @@
class Config(object):
DEBUG = False
TESTING = False
SQLALCHEMY_DATABASE_URI = "sqlite:///data.db"
OWN_ACCOUNTS = []
SECRET = 'setme'
SMSGW_CA = 'kasownik.ca.crt'
SMSGW_CERT = 'kasownik.tls.crt'
SMSGW_KEY = 'kasownik.tls.key'
CACHE_DIR = 'cache/'
LOCK_FILE = '/tmp/kasownik.lock'
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
'datefmt': '%Y-%m-%d %H:%M:%S',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'level': 'INFO',
'formatter': 'standard',
},
'file': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'DEBUG',
'formatter': 'standard',
'filename': 'kasownik.log',
'backupCount': 7,
'when': 'midnight',
}
},
'loggers': {
'': {
'handlers': ['console', 'file'],
'level': 'DEBUG',
'propagate': True,
},
},
}
class DevelopmentConfig(Config):
DEBUG = True

View file

@@ -1,21 +0,0 @@
#!/bin/sh
K_DIR="$HOME"
K_FETCH_DIR="$K_DIR/fetch/"
K_FETCH_LOG="$K_FETCH_DIR/fetch.log"
K_FETCH_ENV="$K_FETCH_DIR/.env"
set -o pipefail
. $K_FETCH_ENV/bin/activate
echo "Fetch started." | ts | tee -a "$K_FETCH_LOG"
python -u "$K_FETCH_DIR/banking-ibcloud.py" 2>&1 | ts | tee -a "$K_FETCH_LOG"
RET=$?
if [ $RET -ne 0 ]
then
exit $RET
fi
echo "Done." | ts | tee -a "$K_FETCH_LOG"

View file

@@ -41,7 +41,7 @@ class RawTransfer(Base):
scrape_timestamp = Column(BigInteger, default=lambda: round(time.time() * 1000000))
def __str__(self):
return u'{} *{} #{} @{} -"{}" -#{} => +"{}" +#{} [{}.{:02d} {}] ~"{}"'.format(
return '{} *{} #{} @{} -"{}" -#{} => +"{}" +#{} [{}.{:02d} {}] ~"{}"'.format(
self.type,
self.uid,
self.on_account,

View file

@@ -1,17 +0,0 @@
syntax = "proto3";
package proto;
option go_package = "code.hackerspace.pl/hscloud/hswaw/smsgw/proto";
message MessagesRequest {
string filter_body = 1;
}
message MessagesResponse {
string sender = 1;
string body = 3;
int64 timestamp = 4;
}
service SMSGateway {
rpc Messages(MessagesRequest) returns (stream MessagesResponse);
}

View file

@@ -1,147 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: smsgw.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='smsgw.proto',
package='proto',
syntax='proto3',
serialized_options=_b('Z-code.hackerspace.pl/hscloud/hswaw/smsgw/proto'),
serialized_pb=_b('\n\x0bsmsgw.proto\x12\x05proto\"&\n\x0fMessagesRequest\x12\x13\n\x0b\x66ilter_body\x18\x01 \x01(\t\"C\n\x10MessagesResponse\x12\x0e\n\x06sender\x18\x01 \x01(\t\x12\x0c\n\x04\x62ody\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x32K\n\nSMSGateway\x12=\n\x08Messages\x12\x16.proto.MessagesRequest\x1a\x17.proto.MessagesResponse0\x01\x42/Z-code.hackerspace.pl/hscloud/hswaw/smsgw/protob\x06proto3')
)
_MESSAGESREQUEST = _descriptor.Descriptor(
name='MessagesRequest',
full_name='proto.MessagesRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='filter_body', full_name='proto.MessagesRequest.filter_body', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=60,
)
_MESSAGESRESPONSE = _descriptor.Descriptor(
name='MessagesResponse',
full_name='proto.MessagesResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='sender', full_name='proto.MessagesResponse.sender', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='body', full_name='proto.MessagesResponse.body', index=1,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='timestamp', full_name='proto.MessagesResponse.timestamp', index=2,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=62,
serialized_end=129,
)
DESCRIPTOR.message_types_by_name['MessagesRequest'] = _MESSAGESREQUEST
DESCRIPTOR.message_types_by_name['MessagesResponse'] = _MESSAGESRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MessagesRequest = _reflection.GeneratedProtocolMessageType('MessagesRequest', (_message.Message,), dict(
DESCRIPTOR = _MESSAGESREQUEST,
__module__ = 'smsgw_pb2'
# @@protoc_insertion_point(class_scope:proto.MessagesRequest)
))
_sym_db.RegisterMessage(MessagesRequest)
MessagesResponse = _reflection.GeneratedProtocolMessageType('MessagesResponse', (_message.Message,), dict(
DESCRIPTOR = _MESSAGESRESPONSE,
__module__ = 'smsgw_pb2'
# @@protoc_insertion_point(class_scope:proto.MessagesResponse)
))
_sym_db.RegisterMessage(MessagesResponse)
DESCRIPTOR._options = None
_SMSGATEWAY = _descriptor.ServiceDescriptor(
name='SMSGateway',
full_name='proto.SMSGateway',
file=DESCRIPTOR,
index=0,
serialized_options=None,
serialized_start=131,
serialized_end=206,
methods=[
_descriptor.MethodDescriptor(
name='Messages',
full_name='proto.SMSGateway.Messages',
index=0,
containing_service=None,
input_type=_MESSAGESREQUEST,
output_type=_MESSAGESRESPONSE,
serialized_options=None,
),
])
_sym_db.RegisterServiceDescriptor(_SMSGATEWAY)
DESCRIPTOR.services_by_name['SMSGateway'] = _SMSGATEWAY
# @@protoc_insertion_point(module_scope)

View file

@@ -1,46 +0,0 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
import smsgw_pb2 as smsgw__pb2
class SMSGatewayStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Messages = channel.unary_stream(
'/proto.SMSGateway/Messages',
request_serializer=smsgw__pb2.MessagesRequest.SerializeToString,
response_deserializer=smsgw__pb2.MessagesResponse.FromString,
)
class SMSGatewayServicer(object):
# missing associated documentation comment in .proto file
pass
def Messages(self, request, context):
# missing associated documentation comment in .proto file
pass
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_SMSGatewayServicer_to_server(servicer, server):
rpc_method_handlers = {
'Messages': grpc.unary_stream_rpc_method_handler(
servicer.Messages,
request_deserializer=smsgw__pb2.MessagesRequest.FromString,
response_serializer=smsgw__pb2.MessagesResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'proto.SMSGateway', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))

View file

@@ -1,24 +1,23 @@
Kasownik webapp
===============
# Kasownik webapp
Setup
-----
## Quick start (old school)
pip install -r requirements.txt
# Copy example development environment config
cp config.py.dist config.py
1. [Register a new SSO application](https://sso.hackerspace.pl/client/create) - the client name and URI don't matter; the redirect URI should be `http://localhost:5000/oauth/callback` (by default); enable the `profile:read` scope; other settings can stay at their defaults.
2. ```sh
pip install -r requirements.txt
# (set up database, one time)
./manage.py syncdb
```
3. Run dev server: `SPACEAUTH_CONSUMER_KEY=xxxx SPACEAUTH_CONSUMER_SECRET=yyyy DISABLE_LDAP=true ./manage.py run -p 5000`
Database initialization
-----------------------
See `config.py` for more envs you can set
./manage.py syncdb
## Quick start (Dockerized)
Development server
------------------
1. Set `SPACEAUTH_CONSUMER_KEY` and `SPACEAUTH_CONSUMER_SECRET` envs
2. `docker-compose run --rm kasownik-web ./manage.py syncdb` (one time)
3. Run the app: `docker-compose up --build`
FLASK_DEBUG=1 ./manage.py run
## TODO
WSGI deployment
---------------
`webapp/wsgi.py` exports `app` object suitable for wsgi deployments.
Add missing table for fetcher, add example data
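To make the WSGI note above concrete, here is a minimal, hedged sketch (illustration only, not part of this commit) of importing the exported `app` object for a local debug run:

```python
# Minimal local-run sketch; assumes webapp/wsgi.py exposes a Flask `app`
# object, as stated in the README above.
from webapp.wsgi import app

if __name__ == "__main__":
    # Roughly what `./manage.py run -p 5000` does; for real deployments
    # the uwsgi command from the Dockerfile should be used instead.
    app.run(port=5000, debug=True)
```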


@ -1,54 +0,0 @@
# Copyright (c) 2015, Sergiusz Bazanski
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import readline
import code
import requests
import hmac
import json
class APIClient(object):
def __init__(self, key, address="https://kasownik.hackerspace.pl"):
self.key = key
self.address = address.rstrip("/")
def __getattr__(self, name):
def f(**data):
serialized = json.dumps(data)
mac = hmac.new(self.key)
mac.update(serialized)
mac64 = mac.digest().encode("base64")
data = serialized.encode("base64") + "," + mac64
r = requests.post("%s/api/%s" % (self.address, name), data)
return json.loads(r.text)
return f
if __name__ == "__main__":
# invoke an interactive version
client = APIClient("testkey", "http://127.0.0.1:5000")
vars = globals().copy()
vars.update(locals())
shell = code.InteractiveConsole(vars)
shell.interact()


@ -4,7 +4,7 @@ env = environs.Env()
env.read_env()
DEBUG = env.bool("DEBUG", False)
SQLALCHEMY_DATABASE_URI = env.str("SQLALCHEMY_DATABASE_URI", "sqlite:///data.db")
SQLALCHEMY_DATABASE_URI = env.str("SQLALCHEMY_DATABASE_URI", "sqlite:///../../data.db")
DUMMY_TRANSFER_UID = "NOTAMEMBER"
SPACEAUTH_CONSUMER_KEY = env.str("SPACEAUTH_CONSUMER_KEY", "kasownik")
@ -24,6 +24,10 @@ LDAP_USER_BASE = env.str("LDAP_USER_BASE", "ou=People,dc=hackerspace,dc=pl")
LDAP_GROUP_FILTER = env.str("LDAP_GROUP_FILTER", "(objectClass=groupOfUniqueNames)")
LDAP_GROUP_BASE = env.str("LDAP_GROUP_BASE", "ou=Group,dc=hackerspace,dc=pl")
SMTP_SERVER = env.str("SMTP_SERVER", "mail.hackerspace.pl")
SMTP_USER = env.str("SMTP_USER", "kasownik")
SMTP_PASSWORD = env.str("SMTP_PASSWORD", "changeme")
CACHE_TYPE = env.str("CACHE_TYPE", "null")
CACHE_NO_NULL_WARNING = True


@ -1,9 +0,0 @@
[uwsgi]
plugins = python27
master = 1
threads = 10
chdir = /var/www/kasownik
venv = /var/www/kasownik/.env
module = webapp.wsgi
callable = app
debug = true


@ -1,13 +1,15 @@
#!/usr/bin/env python
#!/usr/bin/env python3
import os
import click
from flask.cli import FlaskGroup
from webapp.wsgi import app
@click.group(cls=FlaskGroup, create_app=lambda i: app)
def cli():
"""This is a management script for Kasownik."""
if __name__ == '__main__':
if __name__ == "__main__":
cli()


@ -14,7 +14,6 @@ enum34==1.1.6
environs==11.0.0
Flask==0.12.2
Flask-Caching==1.3.3
Flask-Gravatar==0.4.2
Flask-Login==0.4.1
Flask-OAuthlib==0.9.4
Flask-Prometheus==0.0.1


@ -35,7 +35,6 @@ from flask import Flask, redirect
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager, AnonymousUserMixin, login_required, current_user
from flask_caching import Cache
from flask_gravatar import Gravatar
from spaceauth import SpaceAuth
app = Flask(__name__)
@ -44,14 +43,6 @@ app.config.from_object("config")
auth = SpaceAuth()
db = SQLAlchemy()
cache = Cache()
gravatar = Gravatar(
size=256,
rating="g",
default="retro",
force_default=False,
use_ssl=True,
base_url=None,
)
# TODO unsubscribe me from life
cache_enabled = False
@ -96,26 +87,12 @@ class DecimalEncoder(json.JSONEncoder):
def create_app():
db.init_app(app)
auth.init_app(app)
gravatar.init_app(app)
cache.init_app(app)
# Initialize middleware
if app.debug:
app.wsgi_app = sqltap.wsgi.SQLTapMiddleware(app.wsgi_app)
# Setup prometheus metrics
if app.config.get("PROMETHEUS_DIR"):
# This needs to be set before importing prometheus_client
os.environ["prometheus_multiproc_dir"] = app.config["PROMETHEUS_DIR"]
# FIXME: we could expose this somehow
from prometheus_flask_exporter.multiprocess import UWsgiPrometheusMetrics
metrics = UWsgiPrometheusMetrics(group_by="url_rule")
metrics.init_app(app)
# metrics.register_endpoint('/varz', app)
# Register blueprints
import webapp.views
import webapp.admin
@ -124,20 +101,6 @@ def create_app():
app.register_blueprint(webapp.admin.bp)
app.register_blueprint(webapp.api.bp)
# Custom filters
@app.template_filter("inflect")
def inflect(v, one, two, five):
num = abs(v)
if num == 0:
return "%d %s" % (v, five)
elif num == 1:
return "%d %s" % (v, one)
elif num <= 4:
return "%d %s" % (v, two)
return "%d %s" % (v, five)
# Custom CLI commands
import webapp.commands


@ -1,19 +1,28 @@
# - * - coding=utf-8 - * -
import datetime
from email.mime.text import MIMEText
from subprocess import Popen, PIPE
from flask import render_template, request, flash, g, Response, \
redirect, url_for, abort, Blueprint, current_app
from flask import (
render_template,
request,
flash,
g,
Response,
redirect,
url_for,
abort,
Blueprint,
current_app,
)
from flask_login import login_required
from webapp import forms, db, models, admin_required
from webapp import forms, db, models, admin_required, email
from . import directory
from . import logic
bp = Blueprint('admin', __name__)
bp = Blueprint("admin", __name__)
@bp.route("/admin")
@admin_required
@ -21,20 +30,21 @@ bp = Blueprint('admin', __name__)
def index():
members = [m.get_status() for m in models.Member.get_members(True)]
for member in members:
due = member['months_due']
due = member["months_due"]
if due is not None and due < 1:
member['color'] = "00FF00"
member["color"] = "00FF00"
elif due is not None and due < 3:
member['color'] = "E0941B"
member["color"] = "E0941B"
else:
member['color'] = "FF0000"
active_members = list(filter(lambda m: m['judgement'], members))
inactive_members = list(filter(lambda m: not m['judgement'], members))
return render_template("admin_index.html",
active_members=active_members,
inactive_members=inactive_members,
transfers_unmatched=logic.get_unmatched_transfers()
)
member["color"] = "FF0000"
active_members = list(filter(lambda m: m["judgement"], members))
inactive_members = list(filter(lambda m: not m["judgement"], members))
return render_template(
"admin_index.html",
active_members=active_members,
inactive_members=inactive_members,
transfers_unmatched=logic.get_unmatched_transfers(),
)
@bp.route("/admin/ldapsync", methods=["POST", "GET"])
@ -48,25 +58,27 @@ def admin_ldap_sync():
form = forms.LDAPSyncForm(request.form)
form.fatty_to_add.choices = zip(diff['fatty_to_add'], diff['fatty_to_add'])
form.fatty_to_add.default = diff['fatty_to_add']
form.fatty_to_add.choices = zip(diff["fatty_to_add"], diff["fatty_to_add"])
form.fatty_to_add.default = diff["fatty_to_add"]
form.fatty_to_remove.choices = zip(diff['fatty_to_remove'], diff['fatty_to_remove'])
form.fatty_to_remove.default = diff['fatty_to_remove']
form.fatty_to_remove.choices = zip(diff["fatty_to_remove"], diff["fatty_to_remove"])
form.fatty_to_remove.default = diff["fatty_to_remove"]
form.starving_to_add.choices = zip(diff['starving_to_add'], diff['starving_to_add'])
form.starving_to_add.default = diff['starving_to_add']
form.starving_to_add.choices = zip(diff["starving_to_add"], diff["starving_to_add"])
form.starving_to_add.default = diff["starving_to_add"]
form.starving_to_remove.choices = zip(diff['starving_to_remove'], diff['starving_to_remove'])
form.starving_to_remove.default = diff['starving_to_remove']
form.starving_to_remove.choices = zip(
diff["starving_to_remove"], diff["starving_to_remove"]
)
form.starving_to_remove.default = diff["starving_to_remove"]
form.process(request.form)
if form.validate():
changes = {'fatty': {}, 'starving': {}}
changes['fatty']['add'] = form.fatty_to_add.data
changes['fatty']['remove'] = form.fatty_to_remove.data
changes['starving']['add'] = form.starving_to_add.data
changes['starving']['remove'] = form.starving_to_remove.data
changes = {"fatty": {}, "starving": {}}
changes["fatty"]["add"] = form.fatty_to_add.data
changes["fatty"]["remove"] = form.fatty_to_remove.data
changes["starving"]["add"] = form.starving_to_add.data
changes["starving"]["remove"] = form.starving_to_remove.data
directory.update_member_groups(g.ldap, changes)
@ -80,17 +92,20 @@ def admin_csv():
members = []
for m in models.Member.get_members(True):
member = m.get_status()
if member['type'] == 'supporting':
if member["type"] == "supporting":
continue
member['contact_email'] = m.get_contact_email()
member['cn'] = directory.get_member_fields(g.ldap, member['username'], 'cn')['cn']
member["contact_email"] = m.get_contact_email()
member["cn"] = directory.get_member_fields(g.ldap, member["username"], "cn")[
"cn"
]
members.append(member)
active_members = filter(lambda m: m['judgement'], members)
active_members = filter(lambda m: m["judgement"], members)
output = render_template("admin_csv.html", active_members=active_members)
return Response(output)
@bp.route('/admin/member/<membername>', methods=['GET', 'POST'])
@bp.route("/admin/member/<membername>", methods=["GET", "POST"])
@login_required
@admin_required
def admin_member(membername):
@ -98,16 +113,23 @@ def admin_member(membername):
if not member:
abort(404)
status = member.get_status()
cn = directory.get_member_fields(g.ldap, member.username, 'cn')['cn']
cn = directory.get_member_fields(g.ldap, member.username, "cn")["cn"]
admin_form = forms.AdminProfileEdit(obj=member)
if admin_form.validate():
admin_form.populate_obj(member)
db.session.commit()
flash('Member info changed')
flash("Member info changed")
return render_template(
"admin_member.html",
member=member,
status=status,
cn=cn,
admin=True,
admin_form=admin_form,
)
return render_template("admin_member.html", member=member, status=status,
cn=cn, admin=True, admin_form=admin_form)
@bp.route("/admin/member/<membername>/policy:<policy>")
@login_required
@ -117,7 +139,8 @@ def admin_member_set_policy(membername, policy):
member.payment_policy = models.PaymentPolicy[policy].value
db.session.add(member)
db.session.commit()
return redirect(request.referrer or url_for('.admin_member', membername=membername))
return redirect(request.referrer or url_for(".admin_member", membername=membername))
@bp.route("/admin/member/<membername>/membership:<membershiptype>")
@login_required
@ -127,18 +150,21 @@ def admin_member_set_membership(membername, membershiptype):
member.type = models.MembershipType[membershiptype].name
db.session.add(member)
db.session.commit()
return redirect(request.referrer or url_for('.admin_member', membername=membername))
return redirect(request.referrer or url_for(".admin_member", membername=membername))
@bp.route("/admin/member/add/<membershiptype>/<username>")
@login_required
@admin_required
def add_member(membershiptype, username):
member = models.Member(None, username, models.MembershipType[membershiptype].name, True)
member = models.Member(
None, username, models.MembershipType[membershiptype].name, True
)
db.session.add(member)
db.session.commit()
flash('Member created')
return redirect(request.referrer or url_for('.match_manual'))
flash("Member created")
return redirect(request.referrer or url_for(".match_manual"))
@bp.route("/admin/match")
@login_required
@ -159,14 +185,18 @@ def match_auto():
mts = transfer.member_transfers
member = mts[0].member
member.get_status(force_refresh=True)
months = ', '.join('%d-%d' % (mt.year, mt.month) for mt in mts)
flash("Matched transfer {} for {:.2f}PLN to member {} for month {}".format(
transfer.id, transfer.amount/100, member.username, months))
months = ", ".join("%d-%d" % (mt.year, mt.month) for mt in mts)
flash(
"Matched transfer {} for {:.2f}PLN to member {} for month {}".format(
transfer.id, transfer.amount / 100, member.username, months
)
)
db.session.commit()
flash("Matched %i, %i left" % (len(matched), len(unmatched)))
return redirect(url_for(".match_index"))
@bp.route("/admin/match/manual", methods=["GET"])
@login_required
@admin_required
@ -174,33 +204,35 @@ def match_manual():
transfers_unmatched = logic.get_unmatched_transfers()
return render_template("match_manual.html", transfers_unmatched=transfers_unmatched)
@bp.route("/admin/match/ignore/<path:uid>")
@login_required
@admin_required
def ignore(uid):
transfer = models.Transfer.query.filter_by(uid=uid).first()
if not transfer:
flash('No transfer found', 'danger')
flash("No transfer found", "danger")
return redirect(url_for(".match_manual"))
transfer.ignore = True
db.session.commit()
flash('Transfer %s ignored' % (transfer,))
flash("Transfer %s ignored" % (transfer,))
return redirect(request.referrer)
@bp.route("/admin/match/<username>/<int:months>/<path:uid>")
@login_required
@admin_required
def match(username, uid, months):
member = models.Member.query.filter_by(username=username).first()
if not member:
flash('No member found', 'danger')
flash("No member found", "danger")
return redirect(url_for(".match_manual"))
transfer = models.Transfer.query.filter_by(uid=uid).first()
if not transfer:
flash('No transfer found', 'danger')
flash("No transfer found", "danger")
return redirect(url_for(".match_manual"))
for _ in range(months):
@ -212,7 +244,7 @@ def match(username, uid, months):
db.session.commit()
member.get_status(force_refresh=True)
flash('OK, %d get' % transfer.amount)
flash("OK, %d get" % transfer.amount)
return redirect(url_for(".match_manual"))
@ -224,30 +256,40 @@ def match_user_transfer():
uid = request.form["uid"]
member = models.Member.query.filter_by(username=username).first()
if not member:
flash('No member found', 'danger')
flash("No member found", "danger")
return redirect(url_for(".match_manual"))
transfer = models.Transfer.query.filter_by(uid=uid).first()
if not transfer:
flash('No transfer found', 'danger')
flash("No transfer found", "danger")
return redirect(url_for(".match_manual"))
return render_template("match_user_transfer.html", member=member, transfer=transfer)
@bp.route("/admin/spam/", methods=["GET", "POST"])
@login_required
@admin_required
def sendspam():
now = datetime.datetime.now()
members = models.Member.get_members(True).filter_by(
payment_policy=models.PaymentPolicy.normal.value).all()
members = (
models.Member.get_members(True)
.filter_by(payment_policy=models.PaymentPolicy.normal.value)
.all()
)
members = [(m, m.get_status()) for m in members]
members.sort(key=lambda m: (-m[1]['months_due'] or 0))
members.sort(key=lambda m: (-m[1]["months_due"] or 0))
form = forms.SpamForm()
form.members.choices = [(member.id, str(member)) for member, status in members if status['months_due'] or status['judgement']]
form.members.default = [member.id for member, status in members if status['months_due'] > 1]
form.members.choices = [
(member.id, str(member))
for member, status in members
if status["months_due"] or status["judgement"]
]
form.members.default = [
member.id for member, status in members if status["months_due"] > 1
]
form.process(request.form)
@ -258,32 +300,35 @@ def sendspam():
continue
content = render_template(
'mailing/due.txt',
"mailing/due.txt",
member=member,
status=status,
transfers=[t for t in member.transfers if t.transfer.uid != current_app.config['DUMMY_TRANSFER_UID']][-5:],
now=now)
transfers=[
t
for t in member.transfers
if t.transfer.uid != current_app.config["DUMMY_TRANSFER_UID"]
][-5:],
now=now,
)
# Just ignore empty messages
if not content.strip():
continue
msg = MIMEText(content, "plain", "utf-8")
msg["From"] = "Kasownik Hackerspace'owy <kasownik@hackerspace.pl>"
msg["Subject"] = "Stan składek na dzień %s" % now.strftime("%d/%m/%Y")
msg["To"] = member.get_contact_email()
spam.append(msg)
recipient_email = member.get_contact_email()
spam.append((recipient_email, content))
if form.dry_run.data:
readable = [
msg.as_string().split('\n\n')[0] + '\n\n'
+ msg.get_payload(decode=True).decode('utf-8') for msg in spam]
return Response('\n====\n'.join(readable), mimetype='text/plain')
f"To: {recipient_email}\n\n{content}"
for recipient_email, content in spam
]
return Response("\n====\n".join(readable), mimetype="text/plain")
for msg in spam:
p = Popen(["/usr/sbin/sendmail", "-t"], stdin=PIPE)
p.communicate(msg.as_bytes())
smtp_conn = email.get_connection()
for recipient_email, content in spam:
email.send_email(smtp_conn, "Reminder about your Warsaw Hackerspace membership fees", content, recipient_email)
flash('%d messages sent!' % len(spam))
return redirect(url_for('.index'))
return render_template('admin_spam.html', form=form)
flash("%d messages sent!" % len(spam))
return redirect(url_for(".index"))
return render_template("admin_spam.html", form=form)


@ -23,7 +23,6 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import hmac
import json
import datetime
import logging
@ -35,7 +34,8 @@ from flask import request, abort, Response, Blueprint
from webapp import models, app, cache
logger = logging.getLogger(__name__)
bp = Blueprint('api', __name__)
bp = Blueprint("api", __name__)
class APIError(Exception):
def __init__(self, message, code=500):
@ -47,6 +47,7 @@ def _public_api_method(path):
"""A decorator that adds a public, GET based method at /api/<path>.json.
The resulting data is JSON-serialized."""
def decorator2(original):
@wraps(original)
def wrapper_json(*args, **kwargs):
@ -59,135 +60,28 @@ def _public_api_method(path):
code = exc.code
status = "error"
last_transfer = models.Transfer.query.order_by(models.Transfer.date.desc()).first()
last_transfer = models.Transfer.query.order_by(
models.Transfer.date.desc()
).first()
modified = str(last_transfer.date) if last_transfer else None
resp = {
"status": status,
"content": content,
"modified": modified
}
resp = {"status": status, "content": content, "modified": modified}
return Response(json.dumps(resp), mimetype="application/json"), code
return bp.route("/api/" + path + ".json", methods=["GET"])(wrapper_json)
return decorator2
def _private_api_method(path):
"""A decorator that adds a private, HMACed, POST based method at /api/path.
The JSON-decoded POSTbody is stored as request.decoded.
The resulting data is also JSON-encoded.
It also ensures that the request is authorized if 'private' is True.
If so, it also adds a request.api_member object that points to a member if an
API key should be limited to that member (for example, when handing over
keys to normal members)."""
def decorator(original):
@wraps(original)
def wrapper(*args, **kwargs):
if request.data.count(",") != 1:
abort(400)
message64, mac64 = request.data.split(",")
try:
message = message64.decode("base64")
mac = mac64.decode("base64")
except:
abort(400)
for key in models.APIKey.query.all():
mac_verify = hmac.new(key.secret.encode("utf-8"))
mac_verify.update(message)
if mac_verify.digest() == mac:
break
else:
abort(403)
if key.member:
request.api_member = key.member
else:
request.api_member = None
try:
if request.data:
request.decoded = json.loads(request.data.decode("base64"))
else:
request.decoded = {}
except Exception:
logger.exception('Request decode failed')
abort(400)
return json.dumps(original(*args, **kwargs))
return bp.route("/api/" + path, methods=["POST"])(wrapper)
return decorator
@_private_api_method("list_members")
def api_members():
if request.api_member:
abort(403)
members = [member.username for member in models.Member.query.all()]
return members
@_private_api_method("get_member_info")
def api_member():
mid = request.decoded["member"]
if request.api_member and request.api_member.username != mid:
abort(403)
member = models.Member.query.filter_by(username=mid).join(models.Member.transfers).\
join(models.MemberTransfer.transfer).first()
mts = member.transfers
response = {}
response["paid"] = []
for mt in mts:
t = {}
t["year"] = mt.year
t["month"] = mt.month
transfer = {}
transfer["uid"] = mt.transfer.uid
transfer["amount"] = mt.transfer.amount
transfer["title"] = mt.transfer.title
transfer["account"] = mt.transfer.account_from
transfer["from"] = mt.transfer.name_from
t["transfer"] = transfer
response["paid"].append(t)
response["months_due"] = member.get_months_due()
response["membership"] = member.type
return response
@cache.memoize()
def _stats_for_month(year, month):
# TODO: export this to the config
money_required = 4217+615+615
money_paid = 0
mts = models.MemberTransfer.query.filter_by(year=year, month=month).\
join(models.MemberTransfer.transfer).all()
for mt in mts:
amount_all = mt.transfer.amount
amount = amount_all / len(mt.transfer.member_transfers)
money_paid += amount
return money_required, money_paid/100
@_public_api_method("month/<year>/<month>")
def api_month(year=None, month=None):
money_required, money_paid = _stats_for_month(year, month)
return dict(required=money_required, paid=money_paid)
@_public_api_method("mana")
def api_manamana():
"""To-odee doo-dee-doo!"""
now = datetime.datetime.now()
money_required, money_paid = _stats_for_month(now.year, now.month)
return dict(required=money_required, paid=money_paid)
@_public_api_method("judgement/<membername>")
def api_judgement(membername):
member = models.Member.query.filter_by(username=membername).first()
if not member:
raise APIError("No such member.", 404)
judgement = member.get_status()['judgement']
judgement = member.get_status()["judgement"]
return judgement
@_public_api_method("months_due/<membername>")
@cache.memoize()
def api_months_due(membername):
@ -197,22 +91,7 @@ def api_months_due(membername):
year, month = member.get_last_paid()
if not year:
raise APIError("Member never paid.", 402)
if year and member.active == False and member.username == 'b_rt':
raise APIError("Stoned.",420)
if year and member.active == False:
raise APIError("No longer a member.", 410)
due = member.get_months_due()
return due
@_public_api_method("cashflow/<int:year>/<int:month>")
@cache.memoize()
def api_cashflow(year, month):
start = datetime.date(year=year, month=month, day=1)
month += 1
if month > 12:
month = 1
year += 1
end = datetime.date(year=year, month=month, day=1)
transfers = models.Transfer.query.filter(and_(models.Transfer.date >= start, models.Transfer.date < end, models.Transfer.ignore == False)).all()
amount_in = sum(t.amount for t in transfers)
return {"in": amount_in/100, "out": -1}


@ -3,9 +3,11 @@ from flask_login import AnonymousUserMixin
from spaceauth.caps import cap_check
from webapp import models
class AnonymousUser(AnonymousUserMixin):
is_admin = False
class User(object):
def __init__(self, username):
self.username = username.lower().strip()
@ -27,8 +29,7 @@ class User(object):
@property
def is_admin(self):
return cap_check('kasownik_access', self.username)
return cap_check("kasownik_access", self.username)
def get_model(self, deep=True):
return models.Member.get_members(deep) \
.filter_by(username=self.username).first()
return models.Member.get_members(deep).filter_by(username=self.username).first()


@ -9,8 +9,9 @@ from . import logic
group = AppGroup(__name__)
@group.command()
@click.option('-n', '--dry-run', is_flag=True, help='Don\'t apply any changes.')
@click.option("-n", "--dry-run", is_flag=True, help="Don't apply any changes.")
def ldapsync(dry_run):
"""Synchronizes LDAP groups state."""
@ -23,29 +24,30 @@ def ldapsync(dry_run):
if diff is None:
return
changes = {'fatty': {}, 'starving': {}}
changes['fatty']['add'] = diff['fatty_to_add']
changes['fatty']['remove'] = diff['fatty_to_remove']
changes['starving']['add'] = diff['starving_to_add']
changes['starving']['remove'] = diff['starving_to_remove']
changes = {"fatty": {}, "starving": {}}
changes["fatty"]["add"] = diff["fatty_to_add"]
changes["fatty"]["remove"] = diff["fatty_to_remove"]
changes["starving"]["add"] = diff["starving_to_add"]
changes["starving"]["remove"] = diff["starving_to_remove"]
click.echo('Applying %d changes:' % sum([len(n) for n in diff.values()]))
click.echo("Applying %d changes:" % sum([len(n) for n in diff.values()]))
for k, v in changes.items():
changelist = ['+%s' % n for n in v['add']] + ['-%s' % n for n in v['remove']]
changelist = ["+%s" % n for n in v["add"]] + ["-%s" % n for n in v["remove"]]
if changelist:
click.echo('\t%s: %s' % (k, ', '.join(changelist)))
click.echo("\t%s: %s" % (k, ", ".join(changelist)))
click.echo()
if dry_run:
click.echo('Exiting, just a dry run.')
click.echo("Exiting, just a dry run.")
return
directory.update_member_groups(g.ldap, changes)
click.echo('Done.')
click.echo("Done.")
@group.command()
@click.option('-n', '--dry-run', is_flag=True, help='Don\'t commit changes.')
@click.option("-n", "--dry-run", is_flag=True, help="Don't commit changes.")
def automatch(dry_run):
"""Matches transfers to membership months."""
transfers_unmatched = logic.get_unmatched_transfers()
@ -58,9 +60,12 @@ def automatch(dry_run):
if not dry_run:
member.get_status(force_refresh=True)
months = ', '.join('%d-%d' % (mt.year, mt.month) for mt in mts)
click.echo("Matched transfer {} for {:.2f}PLN to member {} for month {}".format(
transfer.id, transfer.amount/100, member.username, months))
months = ", ".join("%d-%d" % (mt.year, mt.month) for mt in mts)
click.echo(
"Matched transfer {} for {:.2f}PLN to member {} for month {}".format(
transfer.id, transfer.amount / 100, member.username, months
)
)
if dry_run:
click.echo("Dry run, not commiting.")
@ -71,8 +76,9 @@ def automatch(dry_run):
if matched:
click.echo("Done, %d matched, %d left" % (len(matched), len(unmatched)))
@group.command()
def syncdb():
"""Initializes database."""
db.create_all()
click.echo('Done.')
click.echo("Done.")


@ -14,14 +14,14 @@ with transfer2 as (
and title not like 'Lokata nr DP%'
),
transfer3 as (
select dt, case currency when 'PLN' then amount_rel when 'EUR' then amount_rel * :EURPLN_RATE else NULL end as amount_c, to_name from transfer2
select dt, case currency when 'PLN' then amount_rel when 'EUR' then amount_rel * :EURPLN_RATE else NULL end as amount_c, to_name from transfer2
),
monthly_incomes as (
select
dt,
sum(amount_c)/100.0 as month_balance_i
from transfer3
where to_name not like '%PSP Zjednoczenie%' and amount_c > 0
where to_name !~* 'zj ?ednoczenie' and amount_c > 0
group by dt order by dt
),
monthly_rest as (
@ -29,7 +29,7 @@ monthly_rest as (
dt,
sum(amount_c)/100.0 as month_balance_r
from transfer3
where to_name not like '%PSP Zjednoczenie%' and amount_c < 0
where to_name !~* 'zj ?ednoczenie' and amount_c < 0
group by dt order by dt
),
monthly_psp as (
@ -37,7 +37,7 @@ monthly_psp as (
dt,
sum(amount_c)/100.0 as month_balance_p
from transfer3
where to_name like '%PSP Zjednoczenie%'
where to_name ~* 'zj ?ednoczenie'
group by dt order by dt
),
joined as (


@ -11,7 +11,7 @@
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
@ -33,45 +33,60 @@ from webapp import mc, cache_enabled, app
def connect():
c = ldap.initialize(app.config['LDAP_URI'])
c.set_option(ldap.OPT_X_TLS_CACERTFILE, app.config['LDAP_CA_PATH'])
c = ldap.initialize(app.config["LDAP_URI"])
c.set_option(ldap.OPT_X_TLS_CACERTFILE, app.config["LDAP_CA_PATH"])
c.set_option(ldap.OPT_X_TLS_NEWCTX, 0)
c.start_tls_s()
c.simple_bind_s(app.config['LDAP_BIND_DN'],
app.config['LDAP_BIND_PASSWORD'])
c.simple_bind_s(app.config["LDAP_BIND_DN"], app.config["LDAP_BIND_PASSWORD"])
return c
@app.before_request
def _setup_ldap():
if not app.config.get('DISABLE_LDAP'):
if not app.config.get("DISABLE_LDAP"):
g.ldap = connect()
else:
g.ldap = None
@app.teardown_request
def _destroy_ldap(exception=None):
if g.ldap:
g.ldap.unbind_s()
def get_ldap_group_diff(members):
active_members = list(filter(lambda m: m['judgement'], members))
fatty = set([member['username'] for member in active_members if member['type'] in ['fatty', 'supporting']])
starving = set([member['username'] for member in active_members if member['type'] in ['starving']])
ldap_fatty = set(get_group_members(g.ldap, 'fatty'))
ldap_starving = set(get_group_members(g.ldap, 'starving'))
ldap_potato = set(get_group_members(g.ldap, 'potato'))
active_members = list(filter(lambda m: m["judgement"], members))
fatty = set(
[
member["username"]
for member in active_members
if member["type"] in ["fatty", "supporting"]
]
)
starving = set(
[
member["username"]
for member in active_members
if member["type"] in ["starving"]
]
)
ldap_fatty = set(get_group_members(g.ldap, "fatty"))
ldap_starving = set(get_group_members(g.ldap, "starving"))
ldap_potato = set(get_group_members(g.ldap, "potato"))
result = {}
result['fatty_to_remove'] = list(ldap_fatty - fatty)
result['fatty_to_add'] = list(fatty - ldap_fatty)
result['starving_to_remove'] = list(ldap_starving - starving)
result['starving_to_add'] = list(starving - ldap_starving)
result["fatty_to_remove"] = list(ldap_fatty - fatty)
result["fatty_to_add"] = list(fatty - ldap_fatty)
result["starving_to_remove"] = list(ldap_starving - starving)
result["starving_to_add"] = list(starving - ldap_starving)
if sum([len(result[k]) for k in result]) == 0:
return None
return result
# kinda clunky with all the member fetching, transforming the list in various ways and updating it again here, but it's a workaround for LDAP crashing on modify_s, no fucks given
def update_member_groups(c, changes):
for group in changes:
@ -79,86 +94,94 @@ def update_member_groups(c, changes):
changed = False
for op in changes[group]:
for username in changes[group][op]:
if op == 'add':
if get_member_fields(c, username, ['uid'])['uid'] is None:
logging.warning('User %r missing from LDAP, ignoring...', username)
if op == "add":
if get_member_fields(c, username, ["uid"])["uid"] is None:
logging.warning(
"User %r missing from LDAP, ignoring...", username
)
continue
changed = True
target_members.add(username)
elif op == 'remove':
elif op == "remove":
changed = True
target_members.remove(username)
if not changed:
continue
values = []
for username in target_members:
values.append('uid={},{}'.format(username,app.config['LDAP_USER_BASE']).encode('utf-8'))
modlist = [(ldap.MOD_REPLACE,'uniqueMember',values)]
#print group,modlist
c.modify_s('cn={},{}'.format(group,app.config['LDAP_GROUP_BASE']), modlist)
values.append(
"uid={},{}".format(username, app.config["LDAP_USER_BASE"]).encode(
"utf-8"
)
)
modlist = [(ldap.MOD_REPLACE, "uniqueMember", values)]
c.modify_s("cn={},{}".format(group, app.config["LDAP_GROUP_BASE"]), modlist)
# keeping it here instead of git history because it's preferable to the other method, as long as LDAP stops crashing
def update_member_groups_fucked(c, changes):
ops = {'add': ldap.MOD_ADD, 'remove': ldap.MOD_DELETE}
for group in changes:
modlist = []
for op in changes[group]:
values = []
for username in changes[group][op]:
values.append('uid={},{}'.format(username.encode('utf-8'),app.config['LDAP_USER_BASE']))
if values:
modlist.append((ops[op],'uniqueMember',values))
#print group, modlist
c.modify_s('cn={},{}'.format(group.encode('utf-8'),app.config['LDAP_GROUP_BASE']), modlist)
def get_group_members(c, group):
if app.config.get('DISABLE_LDAP'):
if app.config.get("DISABLE_LDAP"):
return []
lfilter = '(&(cn={}){})'.format(group, app.config['LDAP_GROUP_FILTER'])
data = c.search_s(app.config['LDAP_GROUP_BASE'], ldap.SCOPE_SUBTREE,
lfilter, tuple(['uniqueMember',]))
lfilter = "(&(cn={}){})".format(group, app.config["LDAP_GROUP_FILTER"])
data = c.search_s(
app.config["LDAP_GROUP_BASE"],
ldap.SCOPE_SUBTREE,
lfilter,
tuple(
[
"uniqueMember",
]
),
)
members = []
for dn, obj in data:
for k, v in obj.items():
if k == "uniqueMember":
for iv in v:
part,uid,index = ldap.dn.str2dn(iv)[0][0]
if not part == 'uid' or not index == 1:
raise ValueError("First part type {} or index {} seem wrong for DN {}".format(part,index,iv))
part, uid, index = ldap.dn.str2dn(iv)[0][0]
if not part == "uid" or not index == 1:
raise ValueError(
"First part type {} or index {} seem wrong for DN {}".format(
part, index, iv
)
)
members.append(uid)
return members
def get_member_fields(c, member, fields):
if app.config.get('DISABLE_LDAP'):
if app.config.get("DISABLE_LDAP"):
import collections
return collections.defaultdict(str)
if isinstance(fields, str):
fields = [fields,]
fields = [
fields,
]
fields_needed = set(fields)
fields_out = {}
if cache_enabled:
for field in fields:
field_cache = mc.get('kasownik-ldap-member-{}/{}'
.format(member, field))
field_cache = mc.get("kasownik-ldap-member-{}/{}".format(member, field))
if field_cache is not None:
fields_out[field] = field_cache
fields_needed.remove(field)
member = member.replace('(', '').replace(')', '')
lfilter = '(&(uid={}){})'.format(member, app.config['LDAP_USER_FILTER'])
data = c.search_s(app.config['LDAP_USER_BASE'], ldap.SCOPE_SUBTREE,
lfilter, tuple(fields))
member = member.replace("(", "").replace(")", "")
lfilter = "(&(uid={}){})".format(member, app.config["LDAP_USER_FILTER"])
data = c.search_s(
app.config["LDAP_USER_BASE"], ldap.SCOPE_SUBTREE, lfilter, tuple(fields)
)
for dn, obj in data:
for k, v in obj.items():
v = v[0].decode('utf-8')
v = v[0].decode("utf-8")
if k in fields_needed:
fields_out[k] = v
if cache_enabled:
mc.set('kasownik-ldap-member-{}/{}'
.format(member, field), v)
mc.set("kasownik-ldap-member-{}/{}".format(member, field), v)
for k in fields_needed - set(fields_out.keys()):
fields_out[k] = None

web/webapp/email.py (new file)

@ -0,0 +1,46 @@
import smtplib
from email.message import EmailMessage
import flask
import datetime
from typing import Optional
from webapp import app
cached_connection: Optional[smtplib.SMTP] = None
def test_connection_open(conn: smtplib.SMTP) -> bool:
try:
status = conn.noop()[0]
except Exception:
status = -1
return status == 250
def create_connection() -> smtplib.SMTP:
print("Connecting to SMTP...")
conn = smtplib.SMTP_SSL(app.config["SMTP_SERVER"])
conn.login(app.config["SMTP_USER"], app.config["SMTP_PASSWORD"])
return conn
def get_connection() -> smtplib.SMTP:
global cached_connection
if cached_connection is not None and test_connection_open(cached_connection):
return cached_connection
cached_connection = create_connection()
return cached_connection
def send_email(
conn: smtplib.SMTP, subject: str, body: str, recipient_emails: str
) -> None:
msg = EmailMessage()
msg.set_content(body)
msg["Subject"] = subject
msg["From"] = f"Warsaw Hackerspace Kasownik <kasownik@hackerspace.pl>"
msg["To"] = recipient_emails
conn.send_message(msg)
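A short, hedged usage sketch of this module, mirroring how the admin spam view above wires it up (the recipient address and body are illustrative):

```python
# Hypothetical usage of webapp/email.py; in practice the body is rendered
# from mailing/due.txt and the recipient comes from Member.get_contact_email().
from webapp import email

conn = email.get_connection()  # reuses the cached SMTP_SSL connection if still open
email.send_email(
    conn,
    "Reminder about your Warsaw Hackerspace membership fees",
    "Hi, this is an illustrative body.",
    "member@example.com",  # illustrative recipient
)
```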


@ -11,7 +11,7 @@
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
@ -22,31 +22,46 @@
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from wtforms import Form, BooleanField, TextField, PasswordField, SelectMultipleField, FormField, validators, widgets
from wtforms import (
Form,
BooleanField,
TextField,
PasswordField,
SelectMultipleField,
FormField,
validators,
widgets,
)
from flask_wtf import Form as FlaskForm
class MultiCheckboxField(SelectMultipleField):
widget = widgets.ListWidget(prefix_label=False)
option_widget = widgets.CheckboxInput()
class LoginForm(FlaskForm):
username = TextField('Username', [validators.Required()])
password = PasswordField('Password', [validators.Required()])
username = TextField("Username", [validators.Required()])
password = PasswordField("Password", [validators.Required()])
class ContactEmailSettingsForm(FlaskForm):
local = BooleanField("")
ldap = BooleanField("")
custom = TextField("Custom address:")
class LDAPSyncForm(FlaskForm):
fatty_to_add = MultiCheckboxField("Fatty to add", choices=[])
fatty_to_remove = MultiCheckboxField("Fatty to remove", choices=[])
starving_to_add = MultiCheckboxField("Starving to add", choices=[])
starving_to_remove = MultiCheckboxField("Starving to remove", choices=[])
class SpamForm(FlaskForm):
dry_run = BooleanField("Dry run")
members = MultiCheckboxField("Members to spam", coerce=int)
class AdminProfileEdit(FlaskForm):
alias = TextField("Alias")


@ -11,7 +11,7 @@
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
@ -27,9 +27,14 @@
from webapp import app, db
from . import models
def get_unmatched_transfers():
return models.Transfer.query.filter_by(member_transfers=None,ignore=False) \
.order_by(models.Transfer.date.asc()).all()
return (
models.Transfer.query.filter_by(member_transfers=None, ignore=False)
.order_by(models.Transfer.date.asc())
.all()
)
def try_automatch(transfers):
matched = []
@ -48,7 +53,7 @@ def try_automatch(transfers):
for m in range(months):
mt = models.MemberTransfer(None, year, month, transfer)
member.transfers.append(mt)
year, month = member._yearmonth_increment((year,month))
year, month = member._yearmonth_increment((year, month))
matched.append(transfer)
else:
unmatched.append(transfer)


@ -1,6 +1,3 @@
#!/usr/bin/env python2
# - * - coding=utf-8 - * -
# Copyright (c) 2015, Sergiusz Bazanski <q3k@q3k.org>
# Copyright (c) 2015, Remigiusz Marcinkiewicz <enleth@enleth.com>
# All rights reserved.
@ -15,7 +12,7 @@
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
@ -39,21 +36,10 @@ from webapp import app, db, cache, cache_enabled
from . import directory
class APIKey(db.Model):
id = db.Column(db.Integer, primary_key=True)
secret = db.Column(db.String(64))
member = db.Column(db.Integer, db.ForeignKey("member.id"))
description = db.Column(db.Text)
def __repr__(self):
return '<APIKey for %r %r>' % (self.member, self.description)
class MemberTransfer(db.Model):
__tablename__ = "member_transfer"
id = db.Column(db.Integer, primary_key=True)
member_id = db.Column('member', db.Integer, db.ForeignKey("member.id"))
member_id = db.Column("member", db.Integer, db.ForeignKey("member.id"))
transfer_id = db.Column(db.Integer, db.ForeignKey("transfer.id"))
year = db.Column(db.Integer)
month = db.Column(db.Integer)
@ -65,18 +51,20 @@ class MemberTransfer(db.Model):
self.month = month
self.transfer = transfer
from webapp import api
cache.delete_memoized(api._stats_for_month, year, month)
cache.delete_memoized(api.api_cashflow, year, month)
def __repr__(self):
return '<MemberTransfer %d/%d %s %r>' % (self.year, self.month, self.member, self.transfer)
return "<MemberTransfer %d/%d %s %r>" % (
self.year,
self.month,
self.member,
self.transfer,
)
class PaymentStatus(enum.Enum):
never_paid = 1 # never paid membership fees
unpaid = 2 # more than 3 fees unpaid
okay = 3 # fees paid
never_paid = 1 # never paid membership fees
unpaid = 2  # more than 3 fees unpaid
okay = 3 # fees paid
class PaymentPolicy(enum.Enum):
normal = "Normal"
@ -84,20 +72,24 @@ class PaymentPolicy(enum.Enum):
potato = "Potato"
disabled = "Disabled"
class MembershipType(enum.Enum):
fatty = "Fatty"
starving = "Starving"
supporting = "Supporting"
class Member(db.Model):
id = db.Column(db.Integer, primary_key=True)
username = db.Column(db.String(64), unique=True)
type = db.Column(db.Enum("starving", "fatty", "supporting", name="member_types"))
transfers = db.relationship("MemberTransfer", order_by=[
db.asc(MemberTransfer.year), db.asc(MemberTransfer.month)], backref='member')
transfers = db.relationship(
"MemberTransfer",
order_by=[db.asc(MemberTransfer.year), db.asc(MemberTransfer.month)],
backref="member",
)
active = db.Column(db.Boolean) # DEPRECATED donut use
api_keys = db.relationship("APIKey")
active = db.Column(db.Boolean) # DEPRECATED donut use
join_year = db.Column(db.Integer)
join_month = db.Column(db.Integer)
alias = db.Column(db.String(64))
@ -105,8 +97,12 @@ class Member(db.Model):
# Extended Grace Period - do not shut off account after grace period
# Potato - do not ever shut off account, report falsified payment status
# Disabled - manual disable override, regardless of payment extra
payment_policy = db.Column(db.Enum(*[p.value for p in PaymentPolicy.__members__.values()],
name='payment_policy_types'))
payment_policy = db.Column(
db.Enum(
*[p.value for p in PaymentPolicy.__members__.values()],
name="payment_policy_types"
)
)
preferred_email = db.Column(db.String(64))
def mt_covers(self, mt):
@ -116,11 +112,11 @@ class Member(db.Model):
ix = self.transfers.index(mt)
if ix != 0:
# check if the previous mt was covered by the same transfer
if self.transfers[ix-1].transfer.uid == mt.transfer.uid:
if self.transfers[ix - 1].transfer.uid == mt.transfer.uid:
return None
# check how many next mts use the same transfer
rowspan = 0
for ix2 in range(ix+1, len(self.transfers)):
for ix2 in range(ix + 1, len(self.transfers)):
if self.transfers[ix2].transfer.uid == mt.transfer.uid:
rowspan += 1
else:
@ -136,15 +132,15 @@ class Member(db.Model):
@param(deep) - whether to do a subqueryload_all and load all transfer data
"""
if deep:
return cls.query.options(subqueryload_all(
cls.transfers, MemberTransfer.transfer)).order_by(cls.username)
return cls.query.options(
subqueryload_all(cls.transfers, MemberTransfer.transfer)
).order_by(cls.username)
else:
return cls.query.order_by(cls.username)
def _yearmonth_increment(self, ym):
y, m = ym
y2, m2 = y, m+1
y2, m2 = y, m + 1
if m2 > 12:
y2 += 1
m2 = 1
@ -159,22 +155,25 @@ class Member(db.Model):
now = now_date.year * 12 + (now_date.month - 1)
status = {}
status['username'] = self.username
status['alias'] = self.alias
status['type'] = self.type
status['payment_policy'] = self.payment_policy
status["username"] = self.username
status["alias"] = self.alias
status["type"] = self.type
status["payment_policy"] = self.payment_policy
# First check - did we actually get any transfers?
if not self.transfers or self.transfers[0].transfer.uid == app.config['DUMMY_TRANSFER_UID']:
status['payment_status'] = PaymentStatus.never_paid.value
status['months_due'] = None
status['last_paid'] = (None, None)
if (
not self.transfers
or self.transfers[0].transfer.uid == app.config["DUMMY_TRANSFER_UID"]
):
status["payment_status"] = PaymentStatus.never_paid.value
status["months_due"] = None
status["last_paid"] = (None, None)
if self.join_year is not None and self.join_month is not None:
status['joined'] = (self.join_year, self.join_month)
status['next_unpaid'] = status['joined']
status["joined"] = (self.join_year, self.join_month)
status["next_unpaid"] = status["joined"]
else:
status['joined'] = (None, None)
status['next_unpaid'] = (None, None)
status['left'] = False
status["joined"] = (None, None)
status["next_unpaid"] = (None, None)
status["left"] = False
self._apply_judgement(status)
return status
@ -184,11 +183,11 @@ class Member(db.Model):
else:
joined = (self.transfers[0].year, self.transfers[0].month)
joined_scalar = self._yearmonth_scalar(joined)
status['joined'] = joined
status["joined"] = joined
if len(self.transfers[-1].transfer.uid) == 64:
status['last_transfer_bank'] = 'mBank'
status["last_transfer_bank"] = "mBank"
else:
status['last_transfer_bank'] = 'IdeaBank'
status["last_transfer_bank"] = "IdeaBank"
most_recent_transfer = (0, 0)
unpaid_months = 0
@ -207,13 +206,13 @@ class Member(db.Model):
most_recent_scalar = self._yearmonth_scalar(most_recent_transfer)
# Is this transfer a „not a member anymore” transfer?
if this_uid == app.config['DUMMY_TRANSFER_UID']:
if this_uid == app.config["DUMMY_TRANSFER_UID"]:
active_payment = False
continue
# Is this the first transfer? See if it was done on time
if previous_uid is None:
unpaid_months += (this_scalar - joined_scalar)
unpaid_months += this_scalar - joined_scalar
# Apply any missing payments
if active_payment and previous_uid is not None:
@ -232,56 +231,60 @@ class Member(db.Model):
# Apply missing payments from now
if active_payment:
previous_scalar = self._yearmonth_scalar(previous_transfer)
unpaid_months += (now - previous_scalar)
unpaid_months += now - previous_scalar
fees = app.config['MEMBERSHIP_FEES']
fees = app.config["MEMBERSHIP_FEES"]
status['months_due'] = unpaid_months
status['money_due'] = fees.get(self.type, 0) * unpaid_months
status['payment_status'] = PaymentStatus.okay.value if unpaid_months < 4 else PaymentStatus.unpaid.value
status['last_paid'] = most_recent_transfer
status['left'] = not active_payment
status["months_due"] = unpaid_months
status["money_due"] = fees.get(self.type, 0) * unpaid_months
status["payment_status"] = (
PaymentStatus.okay.value
if unpaid_months < 4
else PaymentStatus.unpaid.value
)
status["last_paid"] = most_recent_transfer
status["left"] = not active_payment
if not active_payment:
status['next_unpaid'] = (now_date.year, now_date.month)
status["next_unpaid"] = (now_date.year, now_date.month)
else:
status['next_unpaid'] = self._yearmonth_increment(status['last_paid'])
status["next_unpaid"] = self._yearmonth_increment(status["last_paid"])
self._apply_judgement(status)
return status
def get_local_email(self):
return '{}@hackerspace.pl'.format(self.username)
return "{}@hackerspace.pl".format(self.username)
def get_ldap_email(self):
mra = directory.get_member_fields(g.ldap, self.username,'mailRoutingAddress')
mra = mra['mailRoutingAddress']
mra = directory.get_member_fields(g.ldap, self.username, "mailRoutingAddress")
mra = mra["mailRoutingAddress"]
if not mra:
return None
return mra
def get_custom_email(self):
if self.preferred_email not in ['local', 'ldap', '', None]:
if self.preferred_email not in ["local", "ldap", "", None]:
return self.preferred_email
else:
return None
def uses_local_email(self):
return self.preferred_email == 'local'
return self.preferred_email == "local"
def uses_ldap_email(self):
return self.preferred_email == 'ldap'
return self.preferred_email == "ldap"
def uses_custom_email(self):
return self.get_custom_email() is not None
def get_contact_email(self, adrtype = None):
def get_contact_email(self, adrtype=None):
email = None
if (self.uses_ldap_email() and adrtype is None) or adrtype == 'ldap':
if (self.uses_ldap_email() and adrtype is None) or adrtype == "ldap":
email = self.get_ldap_email()
elif (self.uses_local_email() and adrtype is None) or adrtype == 'local':
elif (self.uses_local_email() and adrtype is None) or adrtype == "local":
email = self.get_local_email()
elif (self.uses_custom_email() and adrtype is None) or adrtype == 'custom':
elif (self.uses_custom_email() and adrtype is None) or adrtype == "custom":
email = self.preferred_email
if email is None:
@ -291,7 +294,7 @@ class Member(db.Model):
def get_status(self, force_refresh=False):
"""It's better to call this after doing a full select of data."""
cache_key = 'kasownik-payment_status-{}'.format(self.username)
cache_key = "kasownik-payment_status-{}".format(self.username)
cache_data = cache.get(cache_key)
if cache_data and cache_enabled and not force_refresh:
data = json.loads(cache_data)
@ -300,41 +303,43 @@ class Member(db.Model):
cache_data = self._get_status_uncached()
from webapp import api
cache.delete_memoized(api.api_months_due, self.username)
cache.set(cache_key, json.dumps(cache_data))
return cache_data
def _apply_judgement(self, status):
if status['left']:
status['judgement'] = False
if status["left"]:
status["judgement"] = False
return
policy = status['payment_policy']
if policy == 'Normal':
if status['payment_status'] == PaymentStatus.okay.value \
and status['last_paid'][0] is not None:
status['judgement'] = True
policy = status["payment_policy"]
if policy == "Normal":
if (
status["payment_status"] == PaymentStatus.okay.value
and status["last_paid"][0] is not None
):
status["judgement"] = True
else:
status['judgement'] = False
elif policy == 'Extended Grace Period':
status['judgement'] = True
elif policy == 'Potato':
status['judgement'] = True
status['months_due'] = 0
status["judgement"] = False
elif policy == "Extended Grace Period":
status["judgement"] = True
elif policy == "Potato":
status["judgement"] = True
status["months_due"] = 0
else:
status['judgement'] = False
status["judgement"] = False
def get_months_due(self):
status = self.get_status()
return status['months_due']
return status["months_due"]
def get_last_paid(self):
status = self.get_status()
return status['last_paid']
return status["last_paid"]
def get_next_unpaid(self):
status = self.get_status()
return status['next_unpaid']
return status["next_unpaid"]
def __init__(self, _id, _username, _type, _active):
self.id = _id
@ -350,7 +355,7 @@ class Member(db.Model):
return self.username
def __repr__(self):
return '<Member %s>' % self.username
return "<Member %s>" % self.username
class Transfer(db.Model):
@ -363,7 +368,9 @@ class Transfer(db.Model):
date = db.Column(db.Date)
ignore = db.Column(db.Boolean)
def __init__(self, _id, _uid, _account_from, _name_from, _amount, _title, _date, _ignore):
def __init__(
self, _id, _uid, _account_from, _name_from, _amount, _title, _date, _ignore
):
self.id = _id
self.uid = _uid
self.account_from = _account_from
@ -377,37 +384,57 @@ class Transfer(db.Model):
return self.uid[:16]
def parse_title(self):
m = re.match(r"^([a-z0-9ąężźćóżłśń\-_\.]+)[ -]+(fatty|starving|superfatty|supporting|supporter)[ -]+([0-9a-z\-_ąężźćóżłśń \(\),/\.]+$)", self.title.strip().lower())
m = re.match(
r"^([a-z0-9ąężźćóżłśń\-_\.]+)[ -]+(fatty|starving|superfatty|supporting|supporter)[ -]+([0-9a-z\-_ąężźćóżłśń \(\),/\.]+$)",
self.title.strip().lower(),
)
if not m:
return (None, None, None)
member, _type, title = m.group(1), m.group(2), m.group(3)
if title in [u"składka", u"opłata", u"opłata miesięczna", "skladka"]:
if title in ["składka", "opłata", "opłata miesięczna", "skladka"]:
return (member, _type, None)
return member, _type, title
MATCH_OK, MATCH_WRONG_TYPE, MATCH_NO_USER, MATCH_UNPARSEABLE, MATCH_KNOWN_UNPARSEABLE = range(5)
(
MATCH_OK,
MATCH_WRONG_TYPE,
MATCH_NO_USER,
MATCH_UNPARSEABLE,
MATCH_KNOWN_UNPARSEABLE,
) = range(5)
def get_matchability(self):
title = self.parse_title()
if not title[0]:
similar = self.get_similar().first()
if similar:
return self.MATCH_KNOWN_UNPARSEABLE, similar.member_transfers[0].member, 0
return (
self.MATCH_KNOWN_UNPARSEABLE,
similar.member_transfers[0].member,
0,
)
return self.MATCH_UNPARSEABLE, self.title, 0
member_name = title[0]
member = Member.query.filter(or_(Member.username==member_name, Member.alias==member_name)).first()
member = Member.query.filter(
or_(Member.username == member_name, Member.alias == member_name)
).first()
if not member:
return self.MATCH_NO_USER, member_name, 0
if title[2]:
return self.MATCH_WRONG_TYPE, member, 0
fees = app.config['MEMBERSHIP_FEES']
fees = app.config["MEMBERSHIP_FEES"]
for type_name, type_amount in fees.items():
if title[1] == type_name and self.amount >= (type_amount*100) and (self.amount % (type_amount*100)) == 0:
return self.MATCH_OK, member, int(self.amount/(type_amount*100))
if (
title[1] == type_name
and self.amount >= (type_amount * 100)
and (self.amount % (type_amount * 100)) == 0
):
return self.MATCH_OK, member, int(self.amount / (type_amount * 100))
return self.MATCH_WRONG_TYPE, member, 0
@ -415,9 +442,12 @@ class Transfer(db.Model):
"""Returns query of transfers with same account_from / name_from
field."""
return Transfer.query.filter(
(Transfer.name_from.ilike(self.name_from or '') |
(Transfer.account_from == self.account_from)) &
(Transfer.member_transfers != None)).order_by(Transfer.date.desc())
(
Transfer.name_from.ilike(self.name_from or "")
| (Transfer.account_from == self.account_from)
)
& (Transfer.member_transfers != None)
).order_by(Transfer.date.desc())
def __repr__(self):
return '<Transfer %s %r %s>' % (self.uid, self.title, self.date)
return "<Transfer %s %r %s>" % (self.uid, self.title, self.date)


@ -10,7 +10,7 @@
<div class="container">
<div class="row">
<div class="col-md-3">
<img src="{{ member.get_contact_email() | gravatar }}" alt="gravatar" style="width: 70%; margin-top: 10px;" />
<img src="https://profile.hackerspace.pl/avatar/user/{{ member.username }}" alt="{{ member.username }}'s avatar" style="width: 70%; margin-top: 10px;" />
<h1>{{member.username}}{%if cn %}<br /><small>{{cn}}</small>{% endif%}</h1>
<h5>{{member.get_contact_email()}}</h5>
<h5>Joined in {{ member.join_year }}-{{ "%02i" | format(member.join_month) }}</h5>
@ -23,6 +23,11 @@
{% endif %}
</p>
{% if admin %}
<a href="https://profile.hackerspace.pl/admin/users/{{ member.username }}">
<span class="input-group-btn">
<button class="btn btn-default" type="button">Open in LDAPWeb</button>
</span>
</a>
<h4>Payment Policy</h4>
{% include "button_payment_policy.html" %}
<h4>Membership Type</h4>
@ -40,6 +45,11 @@
</form>
</p>
{% else %}
<a href="https://profile.hackerspace.pl/vcard">
<span class="input-group-btn">
<button class="btn btn-default" type="button">Edit Profile</button>
</span>
</a>
<h4>Payment Policy</h4>
<h5>{{status.payment_policy}}</h5>
<h4>Membership Type</h4>


@ -1,35 +1,37 @@
Siemasz {{ member.username }},
Hi {{ member.username }},
automatycznie wygenerowałem raport ze stanu składek dla Twojego konta.
Oto stan na dzień {{ now.strftime('%d/%m/%Y') }}:
Here's an (automatically generated) report about your membership fees as of {{ now.strftime('%Y-%m-%d') }}:
{% if status['months_due'] > 0 -%}
Jesteś {{ status['months_due']|inflect('składkę', 'składki', 'składek') }}
{%- if status['money_due'] %} ({{ status['money_due'] }} PLN){% endif %} do tyłu. Kiepsko.
You are {{ status['months_due'] }} months
{%- if status['money_due'] %} ({{ status['money_due'] }} PLN){% endif %} behind. {% if status['months_due'] > 2 -%}Oof!{% endif %}
{% elif status['months_due'] == 0 -%}
Jesteś na bieżąco ze składkami. Hura!
You are up to date with your membership fees, yay!
{% else -%}
Jesteś do przodu ze składkami. Świetnie!
You are {{ -status['months_due'] }} months ahead with your membership fees. Cool!
{% endif -%}
{% if status['months_due'] > 2 %}
{%- if status['months_due'] > 2 %}
According to Warsaw Hackerspace bylaws, if you are three months behind on your membership fees, you
will be automatically removed from membership. Your door and service access will also be revoked.
Zgodnie z regulaminem HS, trzymiesięczna zaległość w składkach oznacza
automatyczne wykreślenie z listy członków i usunięcie karty z zamka.
Masz tydzień na uregulowanie składki od daty wysłania tego emaila.
Please catch up on your membership fees as soon as possible!
{% endif %}
Oto szczegółowe informacje o Twoich ostatnich wpłatach:
Here's detailed information about your recent transfers:
{% for t in transfers %}
- opłata za {{ t.month }}/{{ t.year }}, pokryta przelewem za {{
'%.02f'|format(t.transfer.amount/100) }} PLN w dniu {{ t.transfer.date.strftime('%d/%m/%Y') }}
- membership fee for {{ t.year }}-{{ t.month }}, covered by a {{
'%.02f'|format(t.transfer.amount/100) }} PLN transfer on {{ t.transfer.date.strftime('%Y-%m-%d') }}
{%- endfor %}
Jeśli coś się nie zgadza, odpisz na tego mejla z pretensjami - wiadomość trafi
do naszego białkowego skarbnika który postara się ustalić, co poszło źle.
Jednocześnie przypominam, że trzymiesięczna zaległość w płaceniu oznacza
wykreślenie z listy członków - automatyczną!
Here's our wire transfer information:
- STOWARZYSZENIE WARSZAWSKI HACKERSPACE
- 48 1950 0001 2006 0006 4889 0002
- {{ member.username }} - {{ member.type }} - składka
xoxoxoxo,
Hackerspace'owy Kasownik
--
„100 linii pythona!” - enki o skrypcie do składek
For more information, see https://wiki.hackerspace.pl/finanse
If something is wrong with the above, please respond to this email with your grievances. Your message
will be handled by a protein-based lifeform, who will do their best to figure out what went wrong.
Kind regards,
Kasownik, Warsaw Hackerspace's Membership Fees Automaton
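A sketch of how this template is presumably rendered and delivered (the template
filename, the mailer call and the exact shape of the context are assumptions; the
variable names mirror those used in the template above):

    from datetime import datetime
    from flask import render_template

    body = render_template(
        "mail_due.txt",                 # hypothetical template path
        member=member,
        status=member.get_status(),
        transfers=member.transfers,     # assumed: rows with .month, .year, .transfer
        now=datetime.now(),
    )
    # send_mail(member.get_contact_email(), "Membership fees status", body)  # assumed helper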

View file

@ -1,5 +1,3 @@
# - * - coding=utf-8 - * -
# Copyright (c) 2015, Sergiusz Bazanski <q3k@q3k.org>
# All rights reserved.
#
@ -27,29 +25,43 @@
import requests
import os.path
from flask import Response, request, redirect, flash, render_template, url_for, abort, g, jsonify
from flask import (
Response,
request,
redirect,
flash,
render_template,
url_for,
abort,
g,
jsonify,
)
from flask_login import login_user, login_required, logout_user, current_user
from webapp import app, forms, User, models, cache, db
from . import directory
@app.route('/')
def stats():
return render_template('stats.html')
@app.route('/cursed-plot.json')
@app.route("/")
@login_required
def stats():
return render_template("stats.html")
@app.route("/cursed-plot.json")
@login_required
@cache.cached()
def plot():
with open(os.path.join(os.path.dirname(__file__), 'cursed-query.sql')) as fd:
cursor = db.session.execute(fd.read(), {
'EURPLN_RATE': 4.3,
'START_DATE': '2018-01-01'
})
with open(os.path.join(os.path.dirname(__file__), "cursed-query.sql")) as fd:
cursor = db.session.execute(
fd.read(), {"EURPLN_RATE": 4.3, "START_DATE": "2018-01-01"}
)
result = cursor.fetchall()
columns = cursor.keys()
print(columns)
return jsonify([dict(zip(columns, element)) for element in result])
@app.route('/memberlist')
@app.route("/memberlist")
@login_required
@cache.cached()
def memberlist():
@ -57,20 +69,22 @@ def memberlist():
result = []
for member in members:
element = member.get_status()
if not element['judgement']:
if not element["judgement"]:
continue
result.append(element)
return render_template('memberlist.html', active_members=result)
return render_template("memberlist.html", active_members=result)
@app.route('/profile', methods=['POST', 'GET'])
@app.route("/profile", methods=["POST", "GET"])
@login_required
def self_profile():
member = current_user.get_model()
if not member:
abort(404)
status = member.get_status()
cn = directory.get_member_fields(g.ldap, member.username, 'cn')['cn']
cn = directory.get_member_fields(g.ldap, member.username, "cn")["cn"]
return render_template("admin_member.html", member=member, status=status,
cn=cn, admin=False)
return render_template(
"admin_member.html", member=member, status=status, cn=cn, admin=False
)
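Since stats() and the JSON endpoints are now behind @login_required, an anonymous
request should be bounced to the SSO login instead of being served. A quick check
with Flask's test client (sketch; assumes create_app() takes no arguments):

    from webapp import create_app

    app = create_app()
    with app.test_client() as client:
        resp = client.get("/cursed-plot.json")
        # Expect a redirect to the OAuth login (or 401), not a 200 with data.
        print(resp.status_code, resp.headers.get("Location"))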

View file

@ -1,8 +1,10 @@
# FIXME we need to upgrade Flask_SQLAlchemy to get rid of deprecation warnings
#import warnings
#from flask.exthook import ExtDeprecationWarning
#warnings.simplefilter("ignore", ExtDeprecationWarning)
# import warnings
# from flask.exthook import ExtDeprecationWarning
# warnings.simplefilter("ignore", ExtDeprecationWarning)
import logging
logging.basicConfig(level=logging.INFO)
import webapp
app = webapp.create_app()
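With the app factory above, a local development run (as opposed to the uwsgi
invocation used in the container) could look like this sketch; host, port and
debug values are assumptions:

    import webapp

    app = webapp.create_app()

    if __name__ == "__main__":
        app.run(host="0.0.0.0", port=5000, debug=True)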