def tokenize(s):
    """Split an arithmetic expression string into a list of tokens.

    Numbers become ints; operators and parentheses stay as one-char strings.
    """
    # Pad parentheses with spaces so a plain split() separates them
    # from adjacent numbers/operators.
    return [int(x) if x.isdigit() else x
            for x in s.replace('(', '( ').replace(')', ' )').split()]


def eval_parens(block, eval_flat):
    """Recursively reduce parenthesized groups, delegating flat runs to eval_flat.

    block     -- token list (ints, '+', '*', '(', ')')
    eval_flat -- callable taking a paren-free token list, returning an int
    Returns the integer value of the whole expression.
    """
    stack = [[]]
    for token in block:
        if token == '(':
            stack.append([])  # start collecting a nested group
        elif token == ')':
            # Close the innermost group and replace it with its value.
            subblock = stack.pop()
            stack[-1].append(eval_parens(subblock, eval_flat))
        else:
            stack[-1].append(token)
    # Unbalanced '(' would leave extra frames on the stack.
    assert len(stack) == 1
    return eval_flat(stack[0])


def eval_flat_1(block):
    """Evaluate a paren-free token list strictly left to right (part 1 rules)."""
    ret = block[0]
    # Tokens alternate value/op/value/..., so there are len//2 (op, value) pairs.
    for i in range(len(block) // 2):
        op = block[2 * i + 1]
        value = block[2 * i + 2]
        if op == '*':
            ret *= value
        else:
            ret += value
    return ret


def eval_flat_2(block):
    """Evaluate a paren-free token list with '+' binding tighter than '*'
    (part 2 rules).

    Works on a copy so the caller's list is never mutated (the original
    edited its argument in place, which only worked because callers
    happened to pass throwaway lists).
    """
    block = list(block)
    # First pass: collapse every  a '+' b  triple into its sum, in place.
    i = 1
    while i < len(block):
        if block[i] == '+':
            block.pop(i)                       # drop the '+'
            left = block.pop(i - 1)
            right = block.pop(i - 1)
            block.insert(i - 1, left + right)  # splice the sum back in
        else:
            i += 2                             # skip past a '*' and its operand
    # Second pass: only ints separated by '*' remain; multiply them all.
    ret = 1
    for x in block:
        if isinstance(x, int):
            ret *= x
    return ret


def main():
    """Solve both parts for the expressions in input.txt."""
    with open('input.txt') as fh:
        lines = [tokenize(line.rstrip()) for line in fh]
    print(sum(eval_parens(line, eval_flat_1) for line in lines))
    print(sum(eval_parens(line, eval_flat_2) for line in lines))


# Guard the entry point so importing this module has no side effects;
# running it as a script behaves exactly as before.
if __name__ == '__main__':
    main()