def load(bean, portfolio):
    # Load the ledger and abort early if it lacks an operating currency.
    entries, errors, options_map = loader.load_file(bean)
    if _missing_operating_currency(options_map):
        logging.error("Missing operating_currency")
        sys.exit(1)
    # Gather the allocation targets and the current allocations.
    targets = get_allocation_directives(entries, portfolio)
    allocations = get_allocations(entries, options_map, portfolio)
    total = allocations.total_invested_for_portfolio()
    return (targets, allocations, total)
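
# A minimal, self-contained sketch of the load-and-check pattern used above,
# relying only on the public beancount API: loader.load_file() returns an
# (entries, errors, options_map) triple. The _missing_operating_currency()
# helper above is specific to this script; here it is approximated by looking
# at options_map directly, and the function name load_or_die is hypothetical.
import logging
import sys

from beancount import loader
from beancount.parser import printer


def load_or_die(filename):
    """Load a ledger, print any errors, and abort on fatal problems."""
    entries, errors, options_map = loader.load_file(filename)
    if errors:
        printer.print_errors(errors, file=sys.stderr)
        sys.exit(1)
    if not options_map.get('operating_currency'):
        logging.error("Missing operating_currency")
        sys.exit(1)
    return entries, options_map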
"""
logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
parser = argparse.ArgumentParser(description=__doc__.strip())
parser.add_argument('filename', help='Beancount input filename')
oparser = parser.add_argument_group('Outputs')
oparser.add_argument('-o', '--output', action='store',
                     help="Filename to output results to (default goes to stdout)")
oparser.add_argument('-f', '--format', default='text',
                     choices=['text', 'csv'],
                     help="Output format to render to (text, csv)")
args = parser.parse_args()

# Load the input file.
entries, errors, options_map = loader.load_file(args.filename)

# Get the list of trades.
trades = extract_trades(entries)

# Produce a table of all the trades.
columns = ('units currency cost_currency '
           'buy_date buy_price sell_date sell_price pnl').split()
header = ['Units', 'Currency', 'Cost Currency',
          'Buy Date', 'Buy Price', 'Sell Date', 'Sell Price',
          'P/L']
body = []
for aug, red in trades:
    units = -red.posting.units.number
    buy_price = aug.posting.price.number
    sell_price = red.posting.price.number
    pnl = (units * (sell_price - buy_price)).quantize(buy_price)
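
# The P/L arithmetic above runs entirely on decimal.Decimal values coming out
# of the beancount postings; quantize(buy_price) rounds the product to the
# same number of decimal places as the quoted buy price. A standalone
# illustration with made-up numbers:
from decimal import Decimal

units_example = Decimal('15')
buy_price_example = Decimal('23.45')
sell_price_example = Decimal('27.10')
pnl_example = (units_example * (sell_price_example - buy_price_example)
               ).quantize(buy_price_example)
assert pnl_example == Decimal('54.75')  # 15 * 3.65, kept at two decimal places.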
sql = """
  SELECT
    year, month, root(account, 1) as ar, sum(position) as pos
  FROM
    date > {date_from} AND date < {date_to}
  WHERE
    account ~ "Expenses" OR
    account ~ "Liabilities:Loans" OR
    account ~ "Income"
  GROUP BY year, month, ar
  ORDER BY year, month, ar
  FLATTEN
""".format(**locals())

# Load the file and run a query on it.
entries, _, options_map = loader.load_file(args.filename)
rtypes, rrows = query.run_query(entries, options_map, sql)

# Pivot on the year/month + currency.
out = collections.defaultdict(lambda: collections.defaultdict(dict))
for row in rrows:
    d = out['{}/{:02d}'.format(row.year, row.month)][row.pos.lot.currency]
    d[row.ar] = row.pos.number

# Write this out to a CSV file.
wr = csv.writer(sys.stdout)
for month, currencies in sorted(out.items()):
    for currency, accounts in sorted(currencies.items()):
        exp = accounts.get('Expenses', ZERO)
        loans = accounts.get('Liabilities:Loans', ZERO)
        inc = accounts.get('Income', ZERO)
        wr.writerow((month, exp, loans, -inc, exp + loans - inc, currency))
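
# The pivot above builds a month -> currency -> {root account: amount} mapping
# out of nested defaultdicts before flattening it into CSV rows. The same
# pattern shown on made-up tuples, with no beancount objects involved:
import collections
import csv
import io

rows_example = [('2020/01', 'USD', 'Expenses', 120),
                ('2020/01', 'USD', 'Income', -300)]
pivot_example = collections.defaultdict(lambda: collections.defaultdict(dict))
for month_ex, currency_ex, account_ex, number_ex in rows_example:
    pivot_example[month_ex][currency_ex][account_ex] = number_ex

buffer_example = io.StringIO()
writer_example = csv.writer(buffer_example)
for month_ex, currencies_ex in sorted(pivot_example.items()):
    for currency_ex, accounts_ex in sorted(currencies_ex.items()):
        writer_example.writerow((month_ex,
                                 accounts_ex.get('Expenses', 0),
                                 -accounts_ex.get('Income', 0),
                                 currency_ex))
# buffer_example.getvalue() == '2020/01,120,300,USD\r\n'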
def main():
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Ledger filename')
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)
    for entry in entries:
        if isinstance(entry, data.Transaction):
            txn = convert_transaction(entry)
            # Debugging aid (disabled): print the original entry next to its
            # converted form.
            if 0:
                print('-' * 100)
                printer.print_entry(entry)
                print(txn)
                print()
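
# The isinstance(entry, data.Transaction) filter above can also be written
# with data.filter_txns() from beancount.core. convert_transaction() is this
# script's own helper, so it is only taken as a parameter here; the function
# name iter_converted is hypothetical.
from beancount.core import data


def iter_converted(entries, convert):
    """Yield the result of convert() for every Transaction in the entries."""
    for txn in data.filter_txns(entries):
        yield convert(txn)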
oparser.add_argument('-o', '--output-text', '--text', action='store',
                     help="Render results to text boxes")
oparser.add_argument('--output-csv', '--csv', action='store',
                     help="Render results to CSV files")
oparser.add_argument('--output-stdout', '--stdout', action='store_true',
                     help="Render results to stdout")
args = parser.parse_args()

# Ensure the directories exist.
for directory in [args.output_text, args.output_csv]:
    if directory and not path.exists(directory):
        os.makedirs(directory, exist_ok=True)

# Load the input file and get the list of participants.
entries, errors, options_map = loader.load_file(args.filename)
participants = get_participants(args.filename, options_map)

for participant in participants:
    print("Participant: {}".format(participant))

    save_query("balances", participant, entries, options_map, r"""
      SELECT
        PARENT(account) AS account,
        CONV[SUM(position)] AS amount
      WHERE account ~ ':\b{}'
      GROUP BY 1
      ORDER BY 2 DESC
    """, participant, boxed=False, args=args)
    save_query("expenses", participant, entries, options_map, r"""
      SELECT
        ('-p', 'prices'),
        ('-r', 'rates'),
        ('-m', 'postings')]:
    parser.add_argument(
        shortname, '--output_{}'.format(longname),
        type=argparse.FileType('w'),
        help="CSV filename to write out the {} table to.".format(longname))

parser.add_argument('-o', '--output',
                    type=argparse.FileType('w'),
                    help="CSV filename to write out the final joined table to.")

args = parser.parse_args()

# Load the file contents.
entries, errors, options_map = loader.load_file(args.filename)
validate_entries(entries)

# Initialize main output currency.
main_currency = args.currency or options_map['operating_currency'][0]
logging.info("Operating currency: %s", main_currency)

# Get the map of commodities to their meta tags.
commodities_table = get_commodities_table(
    entries, ['export', 'assetcls', 'strategy', 'issuer'])
if args.output_commodities is not None:
    write_table(commodities_table, args.output_commodities)

# Get the map of accounts to their meta tags.
accounts_table, accounts_map = get_accounts_table(
    entries, ['tax', 'liquid'])
if args.output_accounts is not None:
def main():
    import argparse, logging
    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    parser = argparse.ArgumentParser(description=__doc__.strip())
    parser.add_argument('filename', help='Ledger filename')
    args = parser.parse_args()

    entries, errors, options_map = loader.load_file(args.filename)
    for entry in entries:
        if (isinstance(entry, data.Transaction)
                and any(posting.position.lot.lot_date
                        for posting in entry.postings)):
            printer.print_entry(entry)
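
# The posting.position.lot.lot_date attribute accessed above comes from an
# older beancount API; in beancount v2 the acquisition date of a held lot is
# found on posting.cost.date instead. A sketch of the same filter under that
# newer API (the function name has_dated_lot is hypothetical):
from beancount.core import data


def has_dated_lot(entry):
    """True for transactions in which some posting carries a cost with a date."""
    return (isinstance(entry, data.Transaction)
            and any(posting.cost and posting.cost.date
                    for posting in entry.postings))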
                    default=None,
                    help=("Beginning date of the period to compute returns over "
                          "(default is the first related directive)"))
parser.add_argument('--date-end', '--end-date',
                    action='store', type=date_utils.parse_date_liberally,
                    default=None,
                    help=("End date of the period to compute returns over "
                          "(default is the last related directive)"))

args = parser.parse_args()

logging.basicConfig(level=logging.DEBUG if args.verbose else logging.INFO,
                    format='%(levelname)-8s: %(message)s')

# Load the input file and build the price database.
entries, errors, options_map = loader.load_file(args.filename, log_errors=logging.error)

# Extract the account names using the regular expressions.
racc = regexps_to_accounts(
    entries, args.regexp_value, args.regexp_internal, args.regexp_internalize)

# Compute the returns using the explicit configuration.
returns, (date_first, date_last) = compute_timeline_and_returns(
    entries, options_map,
    args.transfer_account,
    racc.value, racc.internal, racc.internalize,
    args.date_begin, args.date_end)

# Annualize the returns.
annual_returns = annualize_returns(returns, date_first, date_last)

print('Total returns from {} to {}:'.format(date_first, date_last))
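
# annualize_returns() above is defined elsewhere in this script. A common way
# to annualize a total return over [date_first, date_last] is to scale it
# geometrically by the fraction of a year covered; this sketch assumes that
# convention and is not necessarily the exact formula the script uses.
def annualize(total_return, date_first, date_last):
    """E.g. annualize(0.10, date(2020, 1, 1), date(2020, 7, 1)) is roughly 0.21."""
    days = (date_last - date_first).days or 1
    return (1.0 + total_return) ** (365.25 / days) - 1.0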
def main():
    parser = version.ArgumentParser(description=__doc__)
    parser.add_argument('filename',
                        help='Beancount input filename')
    parser.add_argument('database',
                        help='Filename of database file to create')
    args = parser.parse_args()

    logging.basicConfig(level=logging.INFO, format='%(levelname)-8s: %(message)s')
    entries, errors, options_map = loader.load_file(args.filename,
                                                    log_timings=logging.info,
                                                    log_errors=sys.stderr)

    # Delete previous database if it already exists.
    if path.exists(args.database):
        os.remove(args.database)

    # The only supported DBAPI-2.0 backend for now is SQLite3.
    connection = dbapi.connect(args.database)
    setup_decimal_support()
    for function in [
            output_common,
            output_transactions,
            OpenWriter(),
            CloseWriter(),
                    help="End date; if not set, at the end of start's year")
parser.add_argument('-o', '--output', action='store',
                    help="Output directory of all the reports, in txt and csv formats")
args = parser.parse_args()

calculate_commission = False

# Set up the date interval.
if args.start is None:
    args.start = datetime.date(datetime.date.today().year, 1, 1)
if args.end is None:
    args.end = datetime.date(args.start.year + 1, 1, 1)

entries, errors, options_map = loader.load_file(args.filename)

# Create the list of sales.
sales = expand_sales_legs(entries, args.account, args.start, args.end,
                          calculate_commission)

# Produce a detailed table.
lots, tab_detail, totals = create_detailed_table(sales, calculate_commission)

# Aggregate by transaction in order to be able to cross-check against the
# 1099 forms.
agglots = aggregate_sales(lots)
tab_agg = table.create_table(sorted(agglots, key=lambda lot: (lot.ref, lot.no)),
                             fieldspec)

# Create a summary table of P/L.
tab_summary = create_summary_table(totals)
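
# aggregate_sales() above rolls individual lot legs up to one row per sale so
# the totals can be cross-checked against the 1099 forms. A generic sketch of
# that kind of grouping with itertools.groupby; keying on a `ref` attribute
# mirrors the sort key used above, but the exact lot fields (ref, pnl) are
# assumptions for illustration.
import itertools
from decimal import Decimal


def sum_pnl_by_ref(lots):
    """Return {ref: total P/L} for lots grouped by their `ref` attribute."""
    ordered = sorted(lots, key=lambda lot: lot.ref)
    return {ref: sum((lot.pnl for lot in group), Decimal('0'))
            for ref, group in itertools.groupby(ordered, key=lambda lot: lot.ref)}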