Here are some examples of putting it all together to create readable and powerful pipelines.

Data Transformations

>>> from functional_pipeline import pipeline, not_none, lens

>>> people = [
...     { 
...         'first_name': 'John', 
...         'last_name': 'Smith', 
...         'age': 32, 
...         'employment': [
...             {'name': 'McDonalds', 'position': 'Manager'},
...             {'name': 'CSV', 'position': 'Cashier'}
...         ]
...     },
...     { 
...         'first_name': 'Jane', 
...         'last_name': 'Smith', 
...         'age': 30, 
...         'employment': [
...             {'name': 'BurgerKing', 'position': 'Manager'}
...         ]
...     },
...     {
...         'first_name': 'Billy', 
...         'last_name': 'Bob', 
...         'age': 55, 
...         'employment': [
...             {'name': 'Microsoft', 'position': 'Programmer'}
...         ]
...     },
...     {
...         'first_name': 'Jill', 
...         'last_name': 'Jones', 
...         'age': 21, 
...         'employment': []
...     },
... ]

>>> full_names_with_employment_history = pipeline(
...     people,
...     [
...         (filter, lambda x: not_none(lens('employment.0')(x))),
...         (map, lambda x: f"{x['first_name']} {x['last_name']}"),
...         list,
...     ]
... )

>>> full_names_with_employment_history
['John Smith', 'Jane Smith', 'Billy Bob']

File Processing

import os
from typing import Optional

from functional_pipeline import pipeline

def read_file(filename: str) -> Optional[str]:
    try:
        with open(filename, 'r') as f:
            return f.read()
    except Exception:
        return None


errors = pipeline(
    os.listdir('/var/log/'),
    [
        (filter, String.endswith('.log')),
        (map, read_file),
        (filter, not_none),
        (map, String.split('\n')),
        flatten, 
        (filter, contains('ERROR')),
        list,
    ]
)