diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..e6d7272b0f5615cf6ed577eedc54a600000f8453 --- /dev/null +++ b/.gitignore @@ -0,0 +1,12 @@ +__pycache__/ +*.pyc +*.pyo +*.pyd +*.pth +*.pt +*.log +*.tmp +.env +.vscode/ +.idea/ +.DS_Store diff --git a/build/lib/segformer_plusplus/__init__.py b/build/lib/segformer_plusplus/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8a768648cca72d7cc393e7eb85b79b434d9fc7 --- /dev/null +++ b/build/lib/segformer_plusplus/__init__.py @@ -0,0 +1,4 @@ +from .build_model import create_model, create_custom_model +from .random_benchmark import random_benchmark + +__all__ = ['create_model', 'create_custom_model', 'random_benchmark'] diff --git a/build/lib/segformer_plusplus/build_model.py b/build/lib/segformer_plusplus/build_model.py new file mode 100644 index 0000000000000000000000000000000000000000..32ad4eeca728fce54caf78fe49f3f372d21047d9 --- /dev/null +++ b/build/lib/segformer_plusplus/build_model.py @@ -0,0 +1,108 @@ +import os + +from mmengine import registry +from mmengine.config import Config +from mmengine.model import BaseModule + +from .utils import MODELS, imagenet_weights +from .utils import tome_presets + + +class SegFormer(BaseModule): + """ + This class represents a SegFormer model that allows for the application of token merging. + + Attributes: + backbone (BaseModule): MixVisionTransformer backbone + decode_head (BaseModule): SegFormer head + + """ + def __init__(self, cfg): + """ + Initialize the SegFormer model. + + Args: + cfg (Config): an mmengine Config object, which defines the backbone, head and token merging strategy used. + + """ + super().__init__() + self.backbone = registry.build_model_from_cfg(cfg.backbone, registry=MODELS) + self.decode_head = registry.build_model_from_cfg(cfg.decode_head, registry=MODELS) + + def forward(self, x): + """ + Forward pass of the model. + + Args: + x (torch.Tensor): input tensor of shape [B, C, H, W] + + Returns: + torch.Tensor: output tensor + + """ + x = self.backbone(x) + x = self.decode_head(x) + return x + + +def create_model( + backbone: str = 'b0', + tome_strategy: str = None, + out_channels: int = 19, + pretrained: bool = False, +): + """ + Create a SegFormer model using the predefined SegFormer backbones from the MiT series (b0-b5). + + Args: + backbone (str): backbone name (e.g. 'b0') + tome_strategy (str | list(dict)): select strategy from presets ('bsm_hq', 'bsm_fast', 'n2d_2x2') or define a + custom strategy using a list, that contains of dictionaries, in which the strategies for the stage are + defined + out_channels (int): number of output channels (e.g. 
19 for the cityscapes semantic segmentation task) + pretrained (bool): whether to load ImageNet-pretrained backbone weights + + Returns: + BaseModule: SegFormer model + + """ + backbone = backbone.lower() + assert backbone in [f'b{i}' for i in range(6)] + + wd = os.path.dirname(os.path.abspath(__file__)) + + cfg = Config.fromfile(os.path.join(wd, 'configs', f'segformer_mit_{backbone}.py')) + + cfg.decode_head.out_channels = out_channels + + if tome_strategy is not None: + if isinstance(tome_strategy, str): + tome_strategy = tome_presets[tome_strategy] + else: + print("Using custom merging strategy.") + cfg.backbone.tome_cfg = tome_strategy + + # load imagenet weights + if pretrained: + cfg.backbone.init_cfg = dict(type='Pretrained', checkpoint=imagenet_weights[backbone]) + + return SegFormer(cfg) + + +def create_custom_model( + model_cfg: Config, + tome_strategy: list[dict] = None, +): + """ + Create a SegFormer model with customizable backbone and head. + + Args: + model_cfg (Config): mmengine Config defining the backbone and decode head + tome_strategy (list(dict)): custom token merging strategy + + Returns: + BaseModule: SegFormer model + + """ + if tome_strategy is not None: + model_cfg.backbone.tome_cfg = tome_strategy + + return SegFormer(model_cfg) diff --git a/build/lib/segformer_plusplus/configs/__init__.py b/build/lib/segformer_plusplus/configs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b680692d5ff945d85311a8c90c70766275444498 --- /dev/null +++ b/build/lib/segformer_plusplus/configs/__init__.py @@ -0,0 +1 @@ +__all__ = [] \ No newline at end of file diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b0.py b/build/lib/segformer_plusplus/configs/segformer_mit_b0.py new file mode 100644 index 0000000000000000000000000000000000000000..f4eb059010ad97d0360e779cbcca82e3a2b50717 --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b0.py @@ -0,0 +1,28 @@ +norm_cfg = dict(type='SyncBN', requires_grad=True) +backbone = dict( + type='MixVisionTransformer', + in_channels=3, + embed_dims=32, + num_stages=4, + num_layers=[2, 2, 2, 2], + num_heads=[1, 2, 5, 8], + patch_sizes=[7, 3, 3, 3], + sr_ratios=[8, 4, 2, 1], + out_indices=(0, 1, 2, 3), + mlp_ratio=4, + qkv_bias=True, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.1 +) +decode_head = dict( + type='SegformerHead', + in_channels=[32, 64, 160, 256], + in_index=[0, 1, 2, 3], + channels=256, + dropout_ratio=0.1, + out_channels=19, + norm_cfg=norm_cfg, + align_corners=False, + interpolate_mode='bilinear' +) diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b1.py b/build/lib/segformer_plusplus/configs/segformer_mit_b1.py new file mode 100644 index 0000000000000000000000000000000000000000..4ec3214aff7f298cf429b836e51e84cea5aeb771 --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b1.py @@ -0,0 +1,8 @@ +_base_ = ['./segformer_mit_b0.py'] + +backbone = dict( + embed_dims=64, +) +decode_head = dict( + in_channels=[64, 128, 320, 512] +) diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b2.py b/build/lib/segformer_plusplus/configs/segformer_mit_b2.py new file mode 100644 index 0000000000000000000000000000000000000000..230e1e50daba53993126d58efc9bed642cb9f4ca --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b2.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 4, 6, 3] +) diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b3.py b/build/lib/segformer_plusplus/configs/segformer_mit_b3.py new file mode 100644 index 
0000000000000000000000000000000000000000..5c877f9d12459c9508a7627a3131144e94768856 --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b3.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 4, 18, 3] +) diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b4.py b/build/lib/segformer_plusplus/configs/segformer_mit_b4.py new file mode 100644 index 0000000000000000000000000000000000000000..897e59765578c96a5a9a17ffb8cf6aceecc81e3b --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b4.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 8, 27, 3] +) diff --git a/build/lib/segformer_plusplus/configs/segformer_mit_b5.py b/build/lib/segformer_plusplus/configs/segformer_mit_b5.py new file mode 100644 index 0000000000000000000000000000000000000000..7f0762237dd35e601e3153ca2e52a89eac365e1d --- /dev/null +++ b/build/lib/segformer_plusplus/configs/segformer_mit_b5.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 6, 40, 3] +) diff --git a/build/lib/segformer_plusplus/model/__init__.py b/build/lib/segformer_plusplus/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b680692d5ff945d85311a8c90c70766275444498 --- /dev/null +++ b/build/lib/segformer_plusplus/model/__init__.py @@ -0,0 +1 @@ +__all__ = [] \ No newline at end of file diff --git a/build/lib/segformer_plusplus/model/backbone/__init__.py b/build/lib/segformer_plusplus/model/backbone/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc6fa95ea87f1419517325a0a49915b94996d74 --- /dev/null +++ b/build/lib/segformer_plusplus/model/backbone/__init__.py @@ -0,0 +1,3 @@ +from .mit import MixVisionTransformer + +__all__ = ['MixVisionTransformer'] \ No newline at end of file diff --git a/build/lib/segformer_plusplus/model/backbone/mit.py b/build/lib/segformer_plusplus/model/backbone/mit.py new file mode 100644 index 0000000000000000000000000000000000000000..239284919f6c5d14a7873305d7fbdf0ac8c4bd7f --- /dev/null +++ b/build/lib/segformer_plusplus/model/backbone/mit.py @@ -0,0 +1,479 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math + +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from mmcv.cnn import Conv2d, build_activation_layer, build_norm_layer +from mmcv.cnn.bricks.drop import build_dropout +from mmcv.cnn.bricks.transformer import MultiheadAttention +from mmengine.model import BaseModule, ModuleList, Sequential +from mmengine.model.weight_init import (constant_init, normal_init, + trunc_normal_init) +from tomesd.merge import bipartite_soft_matching_random2d + +from ...utils import PatchEmbed +from ...utils import nchw_to_nlc, nlc_to_nchw +from ...utils import MODELS + +class MixFFN(BaseModule): + """An implementation of MixFFN of Segformer. + + The differences between MixFFN & FFN: + 1. Use 1X1 Conv to replace Linear layer. + 2. Introduce 3X3 Conv to encode positional information. + Args: + embed_dims (int): The feature dimension. Same as + `MultiheadAttention`. Defaults: 256. + feedforward_channels (int): The hidden dimension of FFNs. + Defaults: 1024. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='ReLU') + ffn_drop (float, optional): Probability of an element to be + zeroed in FFN. Default 0.0. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. 
+ init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + embed_dims, + feedforward_channels, + act_cfg=dict(type='GELU'), + ffn_drop=0., + dropout_layer=None, + init_cfg=None): + super().__init__(init_cfg) + + self.embed_dims = embed_dims + self.feedforward_channels = feedforward_channels + self.act_cfg = act_cfg + self.activate = build_activation_layer(act_cfg) + + in_channels = embed_dims + fc1 = Conv2d( + in_channels=in_channels, + out_channels=feedforward_channels, + kernel_size=1, + stride=1, + bias=True) + # 3x3 depth wise conv to provide positional encode information + pe_conv = Conv2d( + in_channels=feedforward_channels, + out_channels=feedforward_channels, + kernel_size=3, + stride=1, + padding=(3 - 1) // 2, + bias=True, + groups=feedforward_channels) + fc2 = Conv2d( + in_channels=feedforward_channels, + out_channels=in_channels, + kernel_size=1, + stride=1, + bias=True) + drop = nn.Dropout(ffn_drop) + layers = [fc1, pe_conv, self.activate, drop, fc2, drop] + self.layers = Sequential(*layers) + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else torch.nn.Identity() + + def forward(self, x, hw_shape, identity=None): + out = nlc_to_nchw(x, hw_shape) + out = self.layers(out) + out = nchw_to_nlc(out) + if identity is None: + identity = x + return identity + self.dropout_layer(out) + + +class EfficientMultiheadAttention(MultiheadAttention): + """An implementation of Efficient Multi-head Attention of Segformer. + + This module is modified from MultiheadAttention which is a module from + mmcv.cnn.bricks.transformer. + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + attn_drop (float): A Dropout layer on attn_output_weights. + Default: 0.0. + proj_drop (float): A Dropout layer after `nn.MultiheadAttention`. + Default: 0.0. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. Default: None. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) + or (n, batch, embed_dim). Default: False. + qkv_bias (bool): enable bias for qkv if True. Default True. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + sr_ratio (int): The ratio of spatial reduction of Efficient Multi-head + Attention of Segformer. Default: 1. + """ + + def __init__(self, + embed_dims, + num_heads, + attn_drop=0., + proj_drop=0., + dropout_layer=None, + init_cfg=None, + batch_first=True, + qkv_bias=False, + tome_cfg=dict(), + norm_cfg=dict(type='LN'), + sr_ratio=1): + super().__init__( + embed_dims, + num_heads, + attn_drop, + proj_drop, + dropout_layer=dropout_layer, + init_cfg=init_cfg, + batch_first=batch_first, + bias=qkv_bias) + + self.q_mode = tome_cfg.get('q_mode') + self.kv_mode = tome_cfg.get('kv_mode') + self.tome_cfg = tome_cfg + + self.sr_ratio = sr_ratio + if sr_ratio > 1: + self.sr = Conv2d( + in_channels=embed_dims, + out_channels=embed_dims, + kernel_size=sr_ratio, + stride=sr_ratio) + # The ret[0] of build_norm_layer is norm name. 
+ self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + + def forward(self, x, hw_shape, identity=None): + x_q = x + + if self.sr_ratio > 1: + x_kv = nlc_to_nchw(x, hw_shape) + x_kv = self.sr(x_kv) + x_kv = nchw_to_nlc(x_kv) + x_kv = self.norm(x_kv) + else: + x_kv = x + + # 2D Neighbour Merging KV + if self.kv_mode == 'n2d': + kv_hw_shape = (int(hw_shape[0] / self.sr_ratio), int(hw_shape[1] / self.sr_ratio)) + x_kv = nlc_to_nchw(x_kv, kv_hw_shape) + x_kv = torch.nn.functional.avg_pool2d(x_kv, kernel_size=self.tome_cfg['kv_s'], + stride=self.tome_cfg['kv_s'], + ceil_mode=True) + x_kv = nchw_to_nlc(x_kv) + + # Bipartite Soft Matching (tomesd) KV + if self.kv_mode == 'bsm': + w_kv = int(hw_shape[1] / self.sr_ratio) + h_kv = int(hw_shape[0] / self.sr_ratio) + merge, unmerge = bipartite_soft_matching_random2d(metric=x_kv, w=w_kv, h=h_kv, + r=int(x_kv.size()[1] * self.tome_cfg['kv_r']), + sx=self.tome_cfg['kv_sx'], sy=self.tome_cfg['kv_sy'], + no_rand=True) + x_kv = merge(x_kv) + + if identity is None: + identity = x_q + + # 1D Neighbor Merging Q + if self.q_mode == 'n1d': + x_q = x_q.transpose(-2, -1) + x_q = torch.nn.functional.avg_pool1d(x_q, kernel_size=self.tome_cfg['q_s'], + stride=self.tome_cfg['q_s'], + ceil_mode=True) + x_q = x_q.transpose(-2, -1) + + # 2D Neighbor Merging Q + if self.q_mode == 'n2d': + reduced_hw = (int(torch.ceil(torch.tensor(hw_shape[0] / self.tome_cfg['q_s'][0]))), + int(torch.ceil(torch.tensor(hw_shape[1] / self.tome_cfg['q_s'][1])))) + x_q = nlc_to_nchw(x_q, hw_shape) + x_q = torch.nn.functional.avg_pool2d(x_q, kernel_size=self.tome_cfg['q_s'], + stride=self.tome_cfg['q_s'], + ceil_mode=True) + x_q = nchw_to_nlc(x_q) + + # Bipartite Soft Matching (tomesd) Q + if self.q_mode == 'bsm': + merge, unmerge = bipartite_soft_matching_random2d(metric=x_q, w=hw_shape[1], h=hw_shape[0], + r=int(x_q.size()[1] * self.tome_cfg['q_r']), + sx=self.tome_cfg['q_sx'], sy=self.tome_cfg['q_sy'], + no_rand=True) + x_q = merge(x_q) + + # Because the dataflow('key', 'query', 'value') of + # ``torch.nn.MultiheadAttention`` is (num_query, batch, + # embed_dims), We should adjust the shape of dataflow from + # batch_first (batch, num_query, embed_dims) to num_query_first + # (num_query ,batch, embed_dims), and recover ``attn_output`` + # from num_query_first to batch_first. + + if self.batch_first: + x_q = x_q.transpose(0, 1) + x_kv = x_kv.transpose(0, 1) + out = self.attn(query=x_q, key=x_kv, value=x_kv)[0] + if self.batch_first: + out = out.transpose(0, 1) + + # Unmerging BSM (tome+tomesd) + if self.q_mode == 'bsm': + out = unmerge(out) + + # Unmerging 1D Neighbour Merging + if self.q_mode == 'n1d': + out = out.transpose(-2, -1) + out = torch.nn.functional.interpolate(out, size=identity.size()[-2]) + out = out.transpose(-2, -1) + + # Unmerging 2D Neighbor Merging + if self.q_mode == 'n2d': + out = nlc_to_nchw(out, reduced_hw) + out = torch.nn.functional.interpolate(out, size=hw_shape) + out = nchw_to_nlc(out) + + return identity + self.dropout_layer(self.proj_drop(out)) + + +class TransformerEncoderLayer(BaseModule): + """Implements one encoder layer in Segformer. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed. + after the feed forward layer. Default 0.0. + attn_drop_rate (float): The drop out rate for attention layer. + Default 0.0. + drop_path_rate (float): stochastic depth rate. Default 0.0. 
+ qkv_bias (bool): enable bias for qkv if True. + Default: True. + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) + or (n, batch, embed_dim). Default: False. + init_cfg (dict, optional): Initialization config dict. + Default:None. + sr_ratio (int): The ratio of spatial reduction of Efficient Multi-head + Attention of Segformer. Default: 1. + with_cp (bool): Use checkpoint or not. Using checkpoint will save + some memory while slowing down the training speed. Default: False. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + tome_cfg=dict(), + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + batch_first=True, + sr_ratio=1, + with_cp=False): + super().__init__() + + # The ret[0] of build_norm_layer is norm name. + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + + self.attn = EfficientMultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + batch_first=batch_first, + qkv_bias=qkv_bias, + tome_cfg=tome_cfg, + norm_cfg=norm_cfg, + sr_ratio=sr_ratio) + + # The ret[0] of build_norm_layer is norm name. + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + + self.ffn = MixFFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + self.with_cp = with_cp + + def forward(self, x, hw_shape): + + def _inner_forward(x): + x = self.attn(self.norm1(x), hw_shape, identity=x) + x = self.ffn(self.norm2(x), hw_shape, identity=x) + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + return x + + +@MODELS.register_module() +class MixVisionTransformer(BaseModule): + """The backbone of Segformer. + + This backbone is the implementation of `SegFormer: Simple and + Efficient Design for Semantic Segmentation with + Transformers `_. + Args: + in_channels (int): Number of input channels. Default: 3. + embed_dims (int): Embedding dimension. Default: 768. + num_stags (int): The num of stages. Default: 4. + num_layers (Sequence[int]): The layer number of each transformer encode + layer. Default: [3, 4, 6, 3]. + num_heads (Sequence[int]): The attention heads of each transformer + encode layer. Default: [1, 2, 4, 8]. + patch_sizes (Sequence[int]): The patch_size of each overlapped patch + embedding. Default: [7, 3, 3, 3]. + strides (Sequence[int]): The stride of each overlapped patch embedding. + Default: [4, 2, 2, 2]. + sr_ratios (Sequence[int]): The spatial reduction rate of each + transformer encode layer. Default: [8, 4, 2, 1]. + out_indices (Sequence[int] | int): Output from which stages. + Default: (0, 1, 2, 3). + mlp_ratio (int): ratio of mlp hidden dim to embedding dim. + Default: 4. + qkv_bias (bool): Enable bias for qkv if True. Default: True. + drop_rate (float): Probability of an element to be zeroed. + Default 0.0 + attn_drop_rate (float): The drop out rate for attention layer. + Default 0.0 + drop_path_rate (float): stochastic depth rate. Default 0.0 + norm_cfg (dict): Config dict for normalization layer. 
+ Default: dict(type='LN') + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + pretrained (str, optional): model pretrained path. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + with_cp (bool): Use checkpoint or not. Using checkpoint will save + some memory while slowing down the training speed. Default: False. + """ + + def __init__(self, + in_channels=3, + embed_dims=64, + num_stages=4, + num_layers=[3, 4, 6, 3], + num_heads=[1, 2, 4, 8], + patch_sizes=[7, 3, 3, 3], + strides=[4, 2, 2, 2], + sr_ratios=[8, 4, 2, 1], + out_indices=(0, 1, 2, 3), + mlp_ratio=4, + qkv_bias=True, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + tome_cfg=[dict(), dict(), dict(), dict()], + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN', eps=1e-6), + init_cfg=None, + with_cp=False, + down_sample=False): + super().__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.num_stages = num_stages + self.num_layers = num_layers + self.num_heads = num_heads + self.patch_sizes = patch_sizes + self.strides = strides + self.sr_ratios = sr_ratios + self.with_cp = with_cp + self.down_sample = down_sample + assert num_stages == len(num_layers) == len(num_heads) \ + == len(patch_sizes) == len(strides) == len(sr_ratios) + + self.out_indices = out_indices + assert max(out_indices) < self.num_stages + + # transformer encoder + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(num_layers)) + ] # stochastic num_layer decay rule + + cur = 0 + self.layers = ModuleList() + for i, num_layer in enumerate(num_layers): + embed_dims_i = embed_dims * num_heads[i] + patch_embed = PatchEmbed( + in_channels=in_channels, + embed_dims=embed_dims_i, + kernel_size=patch_sizes[i], + stride=strides[i], + padding=patch_sizes[i] // 2, + norm_cfg=norm_cfg) + layer = ModuleList([ + TransformerEncoderLayer( + embed_dims=embed_dims_i, + num_heads=num_heads[i], + feedforward_channels=mlp_ratio * embed_dims_i, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=dpr[cur + idx], + qkv_bias=qkv_bias, + tome_cfg=tome_cfg[i], + act_cfg=act_cfg, + norm_cfg=norm_cfg, + with_cp=with_cp, + sr_ratio=sr_ratios[i]) for idx in range(num_layer) + ]) + in_channels = embed_dims_i + # The ret[0] of build_norm_layer is norm name. + norm = build_norm_layer(norm_cfg, embed_dims_i)[1] + self.layers.append(ModuleList([patch_embed, layer, norm])) + cur += num_layer + + def init_weights(self): + if self.init_cfg is None: + for m in self.modules(): + if isinstance(m, nn.Linear): + trunc_normal_init(m, std=.02, bias=0.) + elif isinstance(m, nn.LayerNorm): + constant_init(m, val=1.0, bias=0.) 
+ elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[ + 1] * m.out_channels + fan_out //= m.groups + normal_init( + m, mean=0, std=math.sqrt(2.0 / fan_out), bias=0) + else: + super().init_weights() + + def forward(self, x): + if self.down_sample: + x = torch.nn.functional.interpolate(x, scale_factor=(0.5, 0.5)) + outs = [] + + for i, layer in enumerate(self.layers): + x, hw_shape = layer[0](x) + for block in layer[1]: + x = block(x, hw_shape) + x = layer[2](x) + x = nlc_to_nchw(x, hw_shape) + if i in self.out_indices: + outs.append(x) + + return outs diff --git a/build/lib/segformer_plusplus/model/head/__init__.py b/build/lib/segformer_plusplus/model/head/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..939f5552815b4e712af13103e34e72469f240ec9 --- /dev/null +++ b/build/lib/segformer_plusplus/model/head/__init__.py @@ -0,0 +1,3 @@ +from .segformer_head import SegformerHead + +__all__ = ['SegformerHead'] diff --git a/build/lib/segformer_plusplus/model/head/segformer_head.py b/build/lib/segformer_plusplus/model/head/segformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..4f34fcd6e540d4adbfdbfbbdae8704591e239fa0 --- /dev/null +++ b/build/lib/segformer_plusplus/model/head/segformer_head.py @@ -0,0 +1,95 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn +from mmcv.cnn import ConvModule +from mmengine.model import BaseModule + +from ...utils import MODELS +from ...utils import resize + + +@MODELS.register_module() +class SegformerHead(BaseModule): + """The all mlp Head of segformer. + + This head is the implementation of + `Segformer ` _. + + Args: + interpolate_mode: The interpolate mode of MLP head upsample operation. + Default: 'bilinear'. 
+ """ + + def __init__(self, + in_channels=[32, 64, 160, 256], + in_index=[0, 1, 2, 3], + channels=256, + dropout_ratio=0.1, + out_channels=19, + norm_cfg=None, + align_corners=False, + interpolate_mode='bilinear'): + super().__init__() + + self.in_channels = in_channels + self.in_index = in_index + self.channels = channels + self.dropout_ratio = dropout_ratio + self.out_channels = out_channels + self.norm_cfg = norm_cfg + self.align_corners = align_corners + self.interpolate_mode = interpolate_mode + + self.act_cfg = dict(type='ReLU') + self.conv_seg = nn.Conv2d(channels, self.out_channels, kernel_size=1) + if dropout_ratio > 0: + self.dropout = nn.Dropout2d(dropout_ratio) + else: + self.dropout = None + + num_inputs = len(self.in_channels) + + assert num_inputs == len(self.in_index) + + self.convs = nn.ModuleList() + for i in range(num_inputs): + self.convs.append( + ConvModule( + in_channels=self.in_channels[i], + out_channels=self.channels, + kernel_size=1, + stride=1, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + + self.fusion_conv = ConvModule( + in_channels=self.channels * num_inputs, + out_channels=self.channels, + kernel_size=1, + norm_cfg=self.norm_cfg) + + def cls_seg(self, feat): + """Classify each pixel.""" + if self.dropout is not None: + feat = self.dropout(feat) + output = self.conv_seg(feat) + return output + + def forward(self, inputs): + # Receive 4 stage backbone feature map: 1/4, 1/8, 1/16, 1/32 + outs = [] + for idx in range(len(inputs)): + x = inputs[idx] + conv = self.convs[idx] + outs.append( + resize( + input=conv(x), + size=inputs[0].shape[2:], + mode=self.interpolate_mode, + align_corners=self.align_corners)) + + out = self.fusion_conv(torch.cat(outs, dim=1)) + + out = self.cls_seg(out) + + return out diff --git a/build/lib/segformer_plusplus/random_benchmark.py b/build/lib/segformer_plusplus/random_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..8cb1628279a081dd2c64635faea0d154d458ddd9 --- /dev/null +++ b/build/lib/segformer_plusplus/random_benchmark.py @@ -0,0 +1,61 @@ +from typing import Union, List, Tuple + +import numpy as np +import torch + +from .utils import benchmark + +device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') + + +def random_benchmark( + model: torch.nn.Module, + batch_size: Union[int, List[int]] = 1, + image_size: Union[Tuple[int], List[Tuple[int]]] = (3, 1024, 1024), +): + """ + Calculate the FPS of a given model using randomly generated tensors. + + Args: + model: instance of a model (e.g. SegFormer) + batch_size: the batch size(s) at which to calculate the FPS (e.g. 1 or [1, 2, 4]) + image_size: the size of the images to use (e.g. 
(3, 1024, 1024)) + + Returns: the FPS values calculated for all image sizes and batch sizes in the form of a dictionary + + """ + if isinstance(batch_size, int): + batch_size = [batch_size] + if isinstance(image_size, tuple): + image_size = [image_size] + + values = {} + throughput_values = [] + + for i in image_size: + # fill with fps for each batch size + fps = [] + for b in batch_size: + for _ in range(4): + # Baseline benchmark + if i[1] >= 1024: + r = 16 + else: + r = 32 + baseline_throughput = benchmark( + model.to(device), + device=device, + verbose=True, + runs=r, + batch_size=b, + input_size=i + ) + throughput_values.append(baseline_throughput) + throughput_values = np.asarray(throughput_values) + throughput = np.around(np.mean(throughput_values), decimals=2) + print('Im_size:', i, 'Batch_size:', b, 'Mean:', throughput, 'Std:', + np.around(np.std(throughput_values), decimals=2)) + throughput_values = [] + fps.append({b: throughput}) + values[i] = fps + return values diff --git a/build/lib/segformer_plusplus/utils/__init__.py b/build/lib/segformer_plusplus/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..0f7f00f36aa5ae06fd442c4f70f50540c18ebac3 --- /dev/null +++ b/build/lib/segformer_plusplus/utils/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) OpenMMLab. All rights reserved. +from .embed import PatchEmbed +from .shape_convert import nchw_to_nlc, nlc_to_nchw +from .wrappers import resize +from .tome_presets import tome_presets +from .registry import MODELS +from .imagenet_weights import imagenet_weights +from .benchmark import benchmark + +__all__ = [ + 'PatchEmbed', 'nchw_to_nlc', 'nlc_to_nchw', 'resize', 'tome_presets', 'MODELS', 'imagenet_weights', 'benchmark' +] diff --git a/build/lib/segformer_plusplus/utils/benchmark.py b/build/lib/segformer_plusplus/utils/benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..fc6fe92fddc7e5deeebc649bd5baf6f63e219912 --- /dev/null +++ b/build/lib/segformer_plusplus/utils/benchmark.py @@ -0,0 +1,76 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# Source: https://github.com/facebookresearch/ToMe/blob/main/tome/utils.py +# -------------------------------------------------------- + +import time +from typing import Tuple + +import torch +from tqdm import tqdm + + +def benchmark( + model: torch.nn.Module, + device: torch.device = 0, + input_size: Tuple[int] = (3, 224, 224), + batch_size: int = 64, + runs: int = 40, + throw_out: float = 0.25, + use_fp16: bool = False, + verbose: bool = False, +) -> float: + """ + Benchmark the given model with random inputs at the given batch size. 
+ + Args: + - model: the module to benchmark + - device: the device to use for benchmarking + - input_size: the input size to pass to the model (channels, h, w) + - batch_size: the batch size to use for evaluation + - runs: the number of total runs to do + - throw_out: the percentage of runs to throw out at the start of testing + - use_fp16: whether or not to benchmark with float16 and autocast + - verbose: whether or not to use tqdm to print progress / print throughput at end + + Returns: + - the throughput measured in images / second + """ + if not isinstance(device, torch.device): + device = torch.device(device) + is_cuda = torch.device(device).type == "cuda" + + model = model.eval().to(device) + input = torch.rand(batch_size, *input_size, device=device) + if use_fp16: + input = input.half() + + warm_up = int(runs * throw_out) + total = 0 + start = time.time() + + with torch.autocast(device.type, enabled=use_fp16): + with torch.no_grad(): + for i in tqdm(range(runs), disable=not verbose, desc="Benchmarking"): + if i == warm_up: + if is_cuda: + torch.cuda.synchronize() + total = 0 + start = time.time() + + model(input) + total += batch_size + + if is_cuda: + torch.cuda.synchronize() + + end = time.time() + elapsed = end - start + + throughput = total / elapsed + + if verbose: + print(f"Throughput: {throughput:.2f} im/s") + + return throughput diff --git a/build/lib/segformer_plusplus/utils/embed.py b/build/lib/segformer_plusplus/utils/embed.py new file mode 100644 index 0000000000000000000000000000000000000000..880fa062912fc62b7ed1fa2804f18b1fdc614f6d --- /dev/null +++ b/build/lib/segformer_plusplus/utils/embed.py @@ -0,0 +1,330 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Sequence + +import torch.nn as nn +import torch.nn.functional as F +from mmcv.cnn import build_conv_layer, build_norm_layer +from mmengine.model import BaseModule +from mmengine.utils import to_2tuple + + +class AdaptivePadding(nn.Module): + """Applies padding to input (if needed) so that input can get fully covered + by filter you specified. It supports two modes "same" and "corner". The + "same" mode is same with "SAME" padding mode in TensorFlow, pad zero around + input. The "corner" mode would pad zero to bottom right. + + Args: + kernel_size (int | tuple): Size of the kernel: + stride (int | tuple): Stride of the filter. Default: 1: + dilation (int | tuple): Spacing between kernel elements. + Default: 1. + padding (str): Support "same" and "corner", "corner" mode + would pad zero to bottom right, and "same" mode would + pad zero around input. Default: "corner". 
+ Example: + >>> kernel_size = 16 + >>> stride = 16 + >>> dilation = 1 + >>> input = torch.rand(1, 1, 15, 17) + >>> adap_pad = AdaptivePadding( + >>> kernel_size=kernel_size, + >>> stride=stride, + >>> dilation=dilation, + >>> padding="corner") + >>> out = adap_pad(input) + >>> assert (out.shape[2], out.shape[3]) == (16, 32) + >>> input = torch.rand(1, 1, 16, 17) + >>> out = adap_pad(input) + >>> assert (out.shape[2], out.shape[3]) == (16, 32) + """ + + def __init__(self, kernel_size=1, stride=1, dilation=1, padding='corner'): + + super().__init__() + + assert padding in ('same', 'corner') + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + self.padding = padding + self.kernel_size = kernel_size + self.stride = stride + self.dilation = dilation + + def get_pad_shape(self, input_shape): + input_h, input_w = input_shape + kernel_h, kernel_w = self.kernel_size + stride_h, stride_w = self.stride + output_h = math.ceil(input_h / stride_h) + output_w = math.ceil(input_w / stride_w) + pad_h = max((output_h - 1) * stride_h + + (kernel_h - 1) * self.dilation[0] + 1 - input_h, 0) + pad_w = max((output_w - 1) * stride_w + + (kernel_w - 1) * self.dilation[1] + 1 - input_w, 0) + return pad_h, pad_w + + def forward(self, x): + pad_h, pad_w = self.get_pad_shape(x.size()[-2:]) + if pad_h > 0 or pad_w > 0: + if self.padding == 'corner': + x = F.pad(x, [0, pad_w, 0, pad_h]) + elif self.padding == 'same': + x = F.pad(x, [ + pad_w // 2, pad_w - pad_w // 2, pad_h // 2, + pad_h - pad_h // 2 + ]) + return x + + +class PatchEmbed(BaseModule): + """Image to Patch Embedding. + + We use a conv layer to implement PatchEmbed. + + Args: + in_channels (int): The num of input channels. Default: 3 + embed_dims (int): The dimensions of embedding. Default: 768 + conv_type (str): The config dict for embedding + conv layer type selection. Default: "Conv2d". + kernel_size (int): The kernel_size of embedding conv. Default: 16. + stride (int, optional): The slide stride of embedding conv. + Default: None (Would be set as `kernel_size`). + padding (int | tuple | string ): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Default: "corner". + dilation (int): The dilation rate of embedding conv. Default: 1. + bias (bool): Bias of embed conv. Default: True. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. + input_size (int | tuple | None): The size of input, which will be + used to calculate the out size. Only work when `dynamic_size` + is False. Default: None. + init_cfg (`mmengine.ConfigDict`, optional): The Config for + initialization. Default: None. 
+ """ + + def __init__(self, + in_channels=3, + embed_dims=768, + conv_type='Conv2d', + kernel_size=16, + stride=None, + padding='corner', + dilation=1, + bias=True, + norm_cfg=None, + input_size=None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + if stride is None: + stride = kernel_size + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + if isinstance(padding, str): + self.adap_padding = AdaptivePadding( + kernel_size=kernel_size, + stride=stride, + dilation=dilation, + padding=padding) + # disable the padding of conv + padding = 0 + else: + self.adap_padding = None + padding = to_2tuple(padding) + + self.projection = build_conv_layer( + dict(type=conv_type), + in_channels=in_channels, + out_channels=embed_dims, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=bias) + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + else: + self.norm = None + + if input_size: + input_size = to_2tuple(input_size) + # `init_out_size` would be used outside to + # calculate the num_patches + # when `use_abs_pos_embed` outside + self.init_input_size = input_size + if self.adap_padding: + pad_h, pad_w = self.adap_padding.get_pad_shape(input_size) + input_h, input_w = input_size + input_h = input_h + pad_h + input_w = input_w + pad_w + input_size = (input_h, input_w) + + # https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html + h_out = (input_size[0] + 2 * padding[0] - dilation[0] * + (kernel_size[0] - 1) - 1) // stride[0] + 1 + w_out = (input_size[1] + 2 * padding[1] - dilation[1] * + (kernel_size[1] - 1) - 1) // stride[1] + 1 + self.init_out_size = (h_out, w_out) + else: + self.init_input_size = None + self.init_out_size = None + + def forward(self, x): + """ + Args: + x (Tensor): Has shape (B, C, H, W). In most case, C is 3. + + Returns: + tuple: Contains merged results and its spatial shape. + + - x (Tensor): Has shape (B, out_h * out_w, embed_dims) + - out_size (tuple[int]): Spatial shape of x, arrange as + (out_h, out_w). + """ + + if self.adap_padding: + x = self.adap_padding(x) + + x = self.projection(x) + out_size = (x.shape[2], x.shape[3]) + x = x.flatten(2).transpose(1, 2) + if self.norm is not None: + x = self.norm(x) + return x, out_size + + +class PatchMerging(BaseModule): + """Merge patch feature map. + + This layer groups feature map by kernel_size, and applies norm and linear + layers to the grouped feature map. Our implementation uses `nn.Unfold` to + merge patch, which is about 25% faster than original implementation. + Instead, we need to modify pretrained models for compatibility. + + Args: + in_channels (int): The num of input channels. + out_channels (int): The num of output channels. + kernel_size (int | tuple, optional): the kernel size in the unfold + layer. Defaults to 2. + stride (int | tuple, optional): the stride of the sliding blocks in the + unfold layer. Default: None. (Would be set as `kernel_size`) + padding (int | tuple | string ): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Default: "corner". + dilation (int | tuple, optional): dilation parameter in the unfold + layer. Default: 1. + bias (bool, optional): Whether to add bias in linear layer or not. + Defaults: False. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: dict(type='LN'). 
+ init_cfg (dict, optional): The extra config for initialization. + Default: None. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size=2, + stride=None, + padding='corner', + dilation=1, + bias=False, + norm_cfg=dict(type='LN'), + init_cfg=None): + super().__init__(init_cfg=init_cfg) + self.in_channels = in_channels + self.out_channels = out_channels + if stride: + stride = stride + else: + stride = kernel_size + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + if isinstance(padding, str): + self.adap_padding = AdaptivePadding( + kernel_size=kernel_size, + stride=stride, + dilation=dilation, + padding=padding) + # disable the padding of unfold + padding = 0 + else: + self.adap_padding = None + + padding = to_2tuple(padding) + self.sampler = nn.Unfold( + kernel_size=kernel_size, + dilation=dilation, + padding=padding, + stride=stride) + + sample_dim = kernel_size[0] * kernel_size[1] * in_channels + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, sample_dim)[1] + else: + self.norm = None + + self.reduction = nn.Linear(sample_dim, out_channels, bias=bias) + + def forward(self, x, input_size): + """ + Args: + x (Tensor): Has shape (B, H*W, C_in). + input_size (tuple[int]): The spatial shape of x, arrange as (H, W). + Default: None. + + Returns: + tuple: Contains merged results and its spatial shape. + + - x (Tensor): Has shape (B, Merged_H * Merged_W, C_out) + - out_size (tuple[int]): Spatial shape of x, arrange as + (Merged_H, Merged_W). + """ + B, L, C = x.shape + assert isinstance(input_size, Sequence), f'Expect ' \ + f'input_size is ' \ + f'`Sequence` ' \ + f'but get {input_size}' + + H, W = input_size + assert L == H * W, 'input feature has wrong size' + + x = x.view(B, H, W, C).permute([0, 3, 1, 2]) # B, C, H, W + # Use nn.Unfold to merge patch. 
About 25% faster than original method, + # but need to modify pretrained model for compatibility + + if self.adap_padding: + x = self.adap_padding(x) + H, W = x.shape[-2:] + + x = self.sampler(x) + # if kernel_size=2 and stride=2, x should has shape (B, 4*C, H/2*W/2) + + out_h = (H + 2 * self.sampler.padding[0] - self.sampler.dilation[0] * + (self.sampler.kernel_size[0] - 1) - + 1) // self.sampler.stride[0] + 1 + out_w = (W + 2 * self.sampler.padding[1] - self.sampler.dilation[1] * + (self.sampler.kernel_size[1] - 1) - + 1) // self.sampler.stride[1] + 1 + + output_size = (out_h, out_w) + x = x.transpose(1, 2) # B, H/2*W/2, 4*C + x = self.norm(x) if self.norm else x + x = self.reduction(x) + return x, output_size diff --git a/build/lib/segformer_plusplus/utils/imagenet_weights.py b/build/lib/segformer_plusplus/utils/imagenet_weights.py new file mode 100644 index 0000000000000000000000000000000000000000..e9ef1d12b01027e22b2053c796f12a9a2e554ef5 --- /dev/null +++ b/build/lib/segformer_plusplus/utils/imagenet_weights.py @@ -0,0 +1,8 @@ +imagenet_weights = { + 'b0': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b0_20220624-7e0fe6dd.pth', + 'b1': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b1_20220624-02e5a6a1.pth', + 'b2': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b2_20220624-66e8bf70.pth', + 'b3': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b3_20220624-13b1141c.pth', + 'b4': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b4_20220624-d588d980.pth', + 'b5': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b5_20220624-658746d9.pth' +} \ No newline at end of file diff --git a/build/lib/segformer_plusplus/utils/registry.py b/build/lib/segformer_plusplus/utils/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..c991ea9d6909141d1d4dfe51e861d61b530f355a --- /dev/null +++ b/build/lib/segformer_plusplus/utils/registry.py @@ -0,0 +1,6 @@ +from mmengine import Registry + +MODELS = Registry( + 'models', + locations=['segformer_plusplus.model.backbone', 'segformer_plusplus.model.head'] +) diff --git a/build/lib/segformer_plusplus/utils/shape_convert.py b/build/lib/segformer_plusplus/utils/shape_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..cce1e220b645d4b02df1ec2d9ed3137c8acba707 --- /dev/null +++ b/build/lib/segformer_plusplus/utils/shape_convert.py @@ -0,0 +1,107 @@ +# Copyright (c) OpenMMLab. All rights reserved. +def nlc_to_nchw(x, hw_shape): + """Convert [N, L, C] shape tensor to [N, C, H, W] shape tensor. + + Args: + x (Tensor): The input tensor of shape [N, L, C] before conversion. + hw_shape (Sequence[int]): The height and width of output feature map. + + Returns: + Tensor: The output tensor of shape [N, C, H, W] after conversion. + """ + H, W = hw_shape + assert len(x.shape) == 3 + B, L, C = x.shape + assert L == H * W, 'The seq_len doesn\'t match H, W' + return x.transpose(1, 2).reshape(B, C, H, W) + + +def nchw_to_nlc(x): + """Flatten [N, C, H, W] shape tensor to [N, L, C] shape tensor. + + Args: + x (Tensor): The input tensor of shape [N, C, H, W] before conversion. + + Returns: + Tensor: The output tensor of shape [N, L, C] after conversion. + """ + assert len(x.shape) == 4 + return x.flatten(2).transpose(1, 2).contiguous() + + +def nchw2nlc2nchw(module, x, contiguous=False, **kwargs): + """Flatten [N, C, H, W] shape tensor `x` to [N, L, C] shape tensor. 
Use the + reshaped tensor as the input of `module`, and the convert the output of + `module`, whose shape is. + + [N, L, C], to [N, C, H, W]. + + Args: + module (Callable): A callable object the takes a tensor + with shape [N, L, C] as input. + x (Tensor): The input tensor of shape [N, C, H, W]. + contiguous: + contiguous (Bool): Whether to make the tensor contiguous + after each shape transform. + + Returns: + Tensor: The output tensor of shape [N, C, H, W]. + + Example: + >>> import torch + >>> import torch.nn as nn + >>> norm = nn.LayerNorm(4) + >>> feature_map = torch.rand(4, 4, 5, 5) + >>> output = nchw2nlc2nchw(norm, feature_map) + """ + B, C, H, W = x.shape + if not contiguous: + x = x.flatten(2).transpose(1, 2) + x = module(x, **kwargs) + x = x.transpose(1, 2).reshape(B, C, H, W) + else: + x = x.flatten(2).transpose(1, 2).contiguous() + x = module(x, **kwargs) + x = x.transpose(1, 2).reshape(B, C, H, W).contiguous() + return x + + +def nlc2nchw2nlc(module, x, hw_shape, contiguous=False, **kwargs): + """Convert [N, L, C] shape tensor `x` to [N, C, H, W] shape tensor. Use the + reshaped tensor as the input of `module`, and convert the output of + `module`, whose shape is. + + [N, C, H, W], to [N, L, C]. + + Args: + module (Callable): A callable object the takes a tensor + with shape [N, C, H, W] as input. + x (Tensor): The input tensor of shape [N, L, C]. + hw_shape: (Sequence[int]): The height and width of the + feature map with shape [N, C, H, W]. + contiguous (Bool): Whether to make the tensor contiguous + after each shape transform. + + Returns: + Tensor: The output tensor of shape [N, L, C]. + + Example: + >>> import torch + >>> import torch.nn as nn + >>> conv = nn.Conv2d(16, 16, 3, 1, 1) + >>> feature_map = torch.rand(4, 25, 16) + >>> output = nlc2nchw2nlc(conv, feature_map, (5, 5)) + """ + H, W = hw_shape + assert len(x.shape) == 3 + B, L, C = x.shape + assert L == H * W, 'The seq_len doesn\'t match H, W' + if not contiguous: + x = x.transpose(1, 2).reshape(B, C, H, W) + x = module(x, **kwargs) + x = x.flatten(2).transpose(1, 2) + else: + x = x.transpose(1, 2).reshape(B, C, H, W).contiguous() + x = module(x, **kwargs) + x = x.flatten(2).transpose(1, 2).contiguous() + return x diff --git a/build/lib/segformer_plusplus/utils/tome_presets.py b/build/lib/segformer_plusplus/utils/tome_presets.py new file mode 100644 index 0000000000000000000000000000000000000000..152c88689834f48338198657974de42db406957b --- /dev/null +++ b/build/lib/segformer_plusplus/utils/tome_presets.py @@ -0,0 +1,20 @@ +tome_presets = { + 'bsm_hq': [ + dict(q_mode=None, kv_mode='bsm', kv_r=0.6, kv_sx=2, kv_sy=2), + dict(q_mode=None, kv_mode='bsm', kv_r=0.6, kv_sx=2, kv_sy=2), + dict(q_mode='bsm', kv_mode=None, q_r=0.8, q_sx=4, q_sy=4), + dict(q_mode='bsm', kv_mode=None, q_r=0.8, q_sx=4, q_sy=4) + ], + 'bsm_fast': [ + dict(q_mode=None, kv_mode='bsm_r2D', kv_r=0.9, kv_sx=4, kv_sy=4), + dict(q_mode=None, kv_mode='bsm_r2D', kv_r=0.9, kv_sx=4, kv_sy=4), + dict(q_mode='bsm_r2D', kv_mode=None, q_r=0.9, q_sx=4, q_sy=4), + dict(q_mode='bsm_r2D', kv_mode=None, q_r=0.9, q_sx=4, q_sy=4) + ], + 'n2d_2x2': [ + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)) + ] +} diff --git a/build/lib/segformer_plusplus/utils/wrappers.py b/build/lib/segformer_plusplus/utils/wrappers.py new file mode 100644 index 
0000000000000000000000000000000000000000..abbd0c029623b4f480a067e4b78adfec234ef8d0 --- /dev/null +++ b/build/lib/segformer_plusplus/utils/wrappers.py @@ -0,0 +1,51 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings + +import torch.nn as nn +import torch.nn.functional as F + + +def resize(input, + size=None, + scale_factor=None, + mode='nearest', + align_corners=None, + warning=True): + if warning: + if size is not None and align_corners: + input_h, input_w = tuple(int(x) for x in input.shape[2:]) + output_h, output_w = tuple(int(x) for x in size) + if output_h > input_h or output_w > output_h: + if ((output_h > 1 and output_w > 1 and input_h > 1 + and input_w > 1) and (output_h - 1) % (input_h - 1) + and (output_w - 1) % (input_w - 1)): + warnings.warn( + f'When align_corners={align_corners}, ' + 'the output would more aligned if ' + f'input size {(input_h, input_w)} is `x+1` and ' + f'out size {(output_h, output_w)} is `nx+1`') + return F.interpolate(input, size, scale_factor, mode, align_corners) + + +class Upsample(nn.Module): + + def __init__(self, + size=None, + scale_factor=None, + mode='nearest', + align_corners=None): + super().__init__() + self.size = size + if isinstance(scale_factor, tuple): + self.scale_factor = tuple(float(factor) for factor in scale_factor) + else: + self.scale_factor = float(scale_factor) if scale_factor else None + self.mode = mode + self.align_corners = align_corners + + def forward(self, x): + if not self.size: + size = [int(t * self.scale_factor) for t in x.shape[-2:]] + else: + size = self.size + return resize(x, size, None, self.mode, self.align_corners) diff --git a/cityscapes_prediction_output_reference.txt b/cityscapes_prediction_output_reference.txt new file mode 100644 index 0000000000000000000000000000000000000000..f0154c94fb0ccbed140d9288f328003d9b2f754f --- /dev/null +++ b/cityscapes_prediction_output_reference.txt @@ -0,0 +1,256 @@ +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 
8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 10 +8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 8 5 8 8 8 8 8 8 8 8 8 8 8 
+[ ... large grid of space-separated per-pixel class IDs (values 0-18, consistent with the 19-class Cityscapes label set used by the SegFormer configs); raw data rows omitted ... ]
1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 18 18 18 18 18 18 18 18 12 12 11 11 4 4 4 4 4 4 4 4 4 5 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 11 4 4 4 4 4 4 5 4 4 4 4 4 4 4 5 9 9 9 9 9 9 13 13 13 13 5 5 1 13 13 13 13 13 13 13 13 13 13 13 13 13 1 1 1 13 13 13 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 1 18 18 18 18 18 18 18 18 12 12 11 11 4 4 4 4 4 4 4 4 4 5 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 11 9 9 9 4 4 4 5 4 4 4 4 4 9 4 5 9 9 9 9 9 9 13 13 13 13 5 5 1 1 1 1 1 1 1 13 13 13 13 13 13 13 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 18 12 12 11 11 11 4 4 4 4 4 4 4 4 5 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 11 9 9 4 4 4 4 5 5 4 4 4 4 9 4 5 9 9 9 9 9 1 13 13 13 13 5 13 13 1 1 1 1 1 13 1 13 13 13 13 13 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 18 12 12 11 11 11 4 4 4 4 4 4 4 4 5 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 11 9 9 4 9 9 4 5 5 4 4 9 9 9 9 5 9 9 9 9 9 9 1 1 13 13 5 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 18 12 12 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 4 9 9 9 9 9 4 5 5 4 9 9 9 9 9 5 9 9 9 9 9 9 1 1 13 13 13 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 12 12 11 11 11 11 11 4 4 4 4 4 4 4 5 4 4 4 4 11 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 11 9 9 9 9 9 9 9 9 5 9 9 9 9 9 9 9 9 9 9 9 9 9 9 1 13 13 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 12 12 12 11 11 11 11 11 4 4 4 4 4 
4 5 4 4 4 4 11 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 12 12 12 11 11 1 1 1 1 1 1 1 4 4 5 4 4 4 4 4 11 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 4 5 5 4 4 4 4 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +11 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 9 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 18 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 1 5 5 1 4 4 4 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 9 9 9 9 9 1 9 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 18 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 1 5 1 1 1 1 1 11 11 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 11 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 11 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 18 18 18 18 18 18 1 12 12 12 12 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 11 11 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 18 18 18 18 18 18 1 1 12 12 12 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 11 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 0 0 18 18 18 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 11 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 1 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 0 0 18 18 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 18 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 18 18 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 4 13 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 4 4 4 4 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 4 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 1 1 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 +0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 diff --git a/segformer_plusplus.egg-info/PKG-INFO b/segformer_plusplus.egg-info/PKG-INFO new file mode 100644 index 0000000000000000000000000000000000000000..cba86d59473b3d1e43f68639b46746e183b448ba --- /dev/null +++ b/segformer_plusplus.egg-info/PKG-INFO @@ -0,0 +1,11 @@ +Metadata-Version: 2.1 +Name: segformer-plusplus +Version: 0.2 +Summary: Segformer++: Efficient Token-Merging Strategies for High-Resolution Semantic Segmentation +Home-page: UNKNOWN +Author: Marco Kantonis +License: MIT +Platform: UNKNOWN + +https://arxiv.org/abs/2405.14467 + diff --git a/segformer_plusplus.egg-info/SOURCES.txt b/segformer_plusplus.egg-info/SOURCES.txt new file mode 100644 index 0000000000000000000000000000000000000000..009c4df383c107496f8931c322ee8010b7b5f591 --- /dev/null +++ b/segformer_plusplus.egg-info/SOURCES.txt @@ -0,0 +1,29 @@ +setup.py +segformer_plusplus/__init__.py +segformer_plusplus/build_model.py +segformer_plusplus/random_benchmark.py +segformer_plusplus.egg-info/PKG-INFO +segformer_plusplus.egg-info/SOURCES.txt +segformer_plusplus.egg-info/dependency_links.txt +segformer_plusplus.egg-info/requires.txt +segformer_plusplus.egg-info/top_level.txt +segformer_plusplus/configs/__init__.py +segformer_plusplus/configs/segformer_mit_b0.py +segformer_plusplus/configs/segformer_mit_b1.py +segformer_plusplus/configs/segformer_mit_b2.py +segformer_plusplus/configs/segformer_mit_b3.py +segformer_plusplus/configs/segformer_mit_b4.py +segformer_plusplus/configs/segformer_mit_b5.py +segformer_plusplus/model/__init__.py +segformer_plusplus/model/backbone/__init__.py +segformer_plusplus/model/backbone/mit.py +segformer_plusplus/model/head/__init__.py +segformer_plusplus/model/head/segformer_head.py +segformer_plusplus/utils/__init__.py +segformer_plusplus/utils/benchmark.py +segformer_plusplus/utils/embed.py +segformer_plusplus/utils/imagenet_weights.py +segformer_plusplus/utils/registry.py +segformer_plusplus/utils/shape_convert.py +segformer_plusplus/utils/tome_presets.py +segformer_plusplus/utils/wrappers.py \ No newline at end of file diff --git a/segformer_plusplus.egg-info/dependency_links.txt b/segformer_plusplus.egg-info/dependency_links.txt new file mode 100644 index 0000000000000000000000000000000000000000..8b137891791fe96927ad78e64b0aad7bded08bdc --- /dev/null +++ b/segformer_plusplus.egg-info/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/segformer_plusplus.egg-info/requires.txt b/segformer_plusplus.egg-info/requires.txt new file mode 100644 index 0000000000000000000000000000000000000000..dc91ad39950ede4091577bfc881fae9d7c6fff24 --- /dev/null +++ b/segformer_plusplus.egg-info/requires.txt @@ -0,0 +1,2 @@ +tomesd +torch>=2.0.1 diff --git 
a/segformer_plusplus.egg-info/top_level.txt b/segformer_plusplus.egg-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..7c7c3d0feeda21dced282857dfdb2e8aa168357a --- /dev/null +++ b/segformer_plusplus.egg-info/top_level.txt @@ -0,0 +1 @@ +segformer_plusplus diff --git a/segformer_plusplus/Registry/default_scope.py b/segformer_plusplus/Registry/default_scope.py new file mode 100644 index 0000000000000000000000000000000000000000..cad2253b6ebf651d54b8c6f26583e90ad8598297 --- /dev/null +++ b/segformer_plusplus/Registry/default_scope.py @@ -0,0 +1,95 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import copy +import time +from contextlib import contextmanager +from typing import Generator, Optional + +from ..utils.manager import ManagerMixin, _accquire_lock, _release_lock + + +class DefaultScope(ManagerMixin): + """Scope of current task used to reset the current registry, which can be + accessed globally. + + Consider the case of resetting the current ``Registry`` by + ``default_scope`` in the internal module which cannot access runner + directly, it is difficult to get the ``default_scope`` defined in + ``Runner``. However, if ``Runner`` created ``DefaultScope`` instance + by given ``default_scope``, the internal module can get + ``default_scope`` by ``DefaultScope.get_current_instance`` everywhere. + + Args: + name (str): Name of default scope for global access. + scope_name (str): Scope of current task. + + Examples: + >>> from mmengine.model import MODELS + >>> # Define default scope in runner. + >>> DefaultScope.get_instance('task', scope_name='mmdet') + >>> # Get default scope globally. + >>> scope_name = DefaultScope.get_instance('task').scope_name + """ + + def __init__(self, name: str, scope_name: str): + super().__init__(name) + assert isinstance( + scope_name, + str), (f'scope_name should be a string, but got {scope_name}') + self._scope_name = scope_name + + @property + def scope_name(self) -> str: + """ + Returns: + str: Get current scope. + """ + return self._scope_name + + @classmethod + def get_current_instance(cls) -> Optional['DefaultScope']: + """Get latest created default scope. + + Since default_scope is an optional argument for ``Registry.build``. + ``get_current_instance`` should return ``None`` if there is no + ``DefaultScope`` created. + + Examples: + >>> default_scope = DefaultScope.get_current_instance() + >>> # There is no `DefaultScope` created yet, + >>> # `get_current_instance` return `None`. + >>> default_scope = DefaultScope.get_instance( + >>> 'instance_name', scope_name='mmengine') + >>> default_scope.scope_name + mmengine + >>> default_scope = DefaultScope.get_current_instance() + >>> default_scope.scope_name + mmengine + + Returns: + Optional[DefaultScope]: Return None If there has not been + ``DefaultScope`` instance created yet, otherwise return the + latest created DefaultScope instance. + """ + _accquire_lock() + if cls._instance_dict: + instance = super().get_current_instance() + else: + instance = None + _release_lock() + return instance + + @classmethod + @contextmanager + def overwrite_default_scope(cls, scope_name: Optional[str]) -> Generator: + """Overwrite the current default scope with `scope_name`""" + if scope_name is None: + yield + else: + tmp = copy.deepcopy(cls._instance_dict) + # To avoid create an instance with the same name. 
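+            # A microsecond sleep makes sure time.time() below returns a fresh
+            # value, so the temporary 'overwrite-<timestamp>' instance name is
+            # unique; the snapshot stored in `tmp` is restored in `finally`.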
+ time.sleep(1e-6) + cls.get_instance(f'overwrite-{time.time()}', scope_name=scope_name) + try: + yield + finally: + cls._instance_dict = tmp diff --git a/segformer_plusplus/Registry/registry.py b/segformer_plusplus/Registry/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..5263f4c60c62e2caf9f35756274dd80c24245b18 --- /dev/null +++ b/segformer_plusplus/Registry/registry.py @@ -0,0 +1,735 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import inspect +import sys +import types +from collections import abc +from collections.abc import Callable +from contextlib import contextmanager +from importlib import import_module +from typing import Any, Dict, Generator, List, Optional, Tuple, Type, Union +from rich.console import Console +from rich.table import Table + +from .default_scope import DefaultScope + + +MODULE2PACKAGE = { + 'mmcls': 'mmcls', + 'mmdet': 'mmdet', + 'mmdet3d': 'mmdet3d', + 'mmseg': 'mmsegmentation', + 'mmaction': 'mmaction2', + 'mmtrack': 'mmtrack', + 'mmpose': 'mmpose', + 'mmedit': 'mmedit', + 'mmocr': 'mmocr', + 'mmgen': 'mmgen', + 'mmfewshot': 'mmfewshot', + 'mmrazor': 'mmrazor', + 'mmflow': 'mmflow', + 'mmhuman3d': 'mmhuman3d', + 'mmrotate': 'mmrotate', + 'mmselfsup': 'mmselfsup', + 'mmyolo': 'mmyolo', + 'mmpretrain': 'mmpretrain', + 'mmagic': 'mmagic', +} + +class Registry: + """A registry to map strings to classes or functions. + + Registered object could be built from registry. Meanwhile, registered + functions could be called from registry. + + Args: + name (str): Registry name. + build_func (callable, optional): A function to construct instance + from Registry. :func:`build_from_cfg` is used if neither ``parent`` + or ``build_func`` is specified. If ``parent`` is specified and + ``build_func`` is not given, ``build_func`` will be inherited + from ``parent``. Defaults to None. + parent (:obj:`Registry`, optional): Parent registry. The class + registered in children registry could be built from parent. + Defaults to None. + scope (str, optional): The scope of registry. It is the key to search + for children registry. If not specified, scope will be the name of + the package where class is defined, e.g. mmdet, mmcls, mmseg. + Defaults to None. + locations (list): The locations to import the modules registered + in this registry. Defaults to []. + New in version 0.4.0. + + Examples: + >>> # define a registry + >>> MODELS = Registry('models') + >>> # registry the `ResNet` to `MODELS` + >>> @MODELS.register_module() + >>> class ResNet: + >>> pass + >>> # build model from `MODELS` + >>> resnet = MODELS.build(dict(type='ResNet')) + >>> @MODELS.register_module() + >>> def resnet50(): + >>> pass + >>> resnet = MODELS.build(dict(type='resnet50')) + + >>> # hierarchical registry + >>> DETECTORS = Registry('detectors', parent=MODELS, scope='det') + >>> @DETECTORS.register_module() + >>> class FasterRCNN: + >>> pass + >>> fasterrcnn = DETECTORS.build(dict(type='FasterRCNN')) + + >>> # add locations to enable auto import + >>> DETECTORS = Registry('detectors', parent=MODELS, + >>> scope='det', locations=['det.models.detectors']) + >>> # define this class in 'det.models.detectors' + >>> @DETECTORS.register_module() + >>> class MaskRCNN: + >>> pass + >>> # The registry will auto import det.models.detectors.MaskRCNN + >>> fasterrcnn = DETECTORS.build(dict(type='det.MaskRCNN')) + + More advanced usages can be found at + https://mmengine.readthedocs.io/en/latest/advanced_tutorials/registry.html. 
+ """ + + def __init__(self, + name: str, + build_func: Optional[Callable] = None, + parent: Optional['Registry'] = None, + scope: Optional[str] = None, + locations: List = []): + self._name = name + self._module_dict: Dict[str, Type] = dict() + self._children: Dict[str, 'Registry'] = dict() + self._locations = locations + self._imported = False + + if scope is not None: + assert isinstance(scope, str) + self._scope = scope + else: + self._scope = self.infer_scope() + + # See https://mypy.readthedocs.io/en/stable/common_issues.html# + # variables-vs-type-aliases for the use + self.parent: Optional['Registry'] + if parent is not None: + assert isinstance(parent, Registry) + parent._add_child(self) + self.parent = parent + else: + self.parent = None + + # self.build_func will be set with the following priority: + # 1. build_func + # 2. parent.build_func + # 3. build_from_cfg + self.build_func: Callable + if build_func is None: + if self.parent is not None: + self.build_func = self.parent.build_func + else: + from ..utils.build_functions import build_from_cfg + self.build_func = build_from_cfg + else: + self.build_func = build_func + + def __len__(self): + return len(self._module_dict) + + def __contains__(self, key): + return self.get(key) is not None + + def __repr__(self): + table = Table(title=f'Registry of {self._name}') + table.add_column('Names', justify='left', style='cyan') + table.add_column('Objects', justify='left', style='green') + + for name, obj in sorted(self._module_dict.items()): + table.add_row(name, str(obj)) + + console = Console() + with console.capture() as capture: + console.print(table, end='') + + return capture.get() + + @staticmethod + def infer_scope() -> str: + """Infer the scope of registry. + + The name of the package where registry is defined will be returned. + + Returns: + str: The inferred scope name. + + Examples: + >>> # in mmdet/models/backbone/resnet.py + >>> MODELS = Registry('models') + >>> @MODELS.register_module() + >>> class ResNet: + >>> pass + >>> # The scope of ``ResNet`` will be ``mmdet``. + """ + + # `sys._getframe` returns the frame object that many calls below the + # top of the stack. The call stack for `infer_scope` can be listed as + # follow: + # frame-0: `infer_scope` itself + # frame-1: `__init__` of `Registry` which calls the `infer_scope` + # frame-2: Where the `Registry(...)` is called + module = inspect.getmodule(sys._getframe(2)) + if module is not None: + filename = module.__name__ + split_filename = filename.split('.') + scope = split_filename[0] + else: + # use "mmengine" to handle some cases which can not infer the scope + # like initializing Registry in interactive mode + scope = 'mmengine' + return scope + + @staticmethod + def split_scope_key(key: str) -> Tuple[Optional[str], str]: + """Split scope and key. + + The first scope will be split from key. + + Return: + tuple[str | None, str]: The former element is the first scope of + the key, which can be ``None``. The latter is the remaining key. 
+ + Examples: + >>> Registry.split_scope_key('mmdet.ResNet') + 'mmdet', 'ResNet' + >>> Registry.split_scope_key('ResNet') + None, 'ResNet' + """ + split_index = key.find('.') + if split_index != -1: + return key[:split_index], key[split_index + 1:] + else: + return None, key + + @property + def name(self): + return self._name + + @property + def scope(self): + return self._scope + + @property + def module_dict(self): + return self._module_dict + + @property + def children(self): + return self._children + + @property + def root(self): + return self._get_root_registry() + + @contextmanager + def switch_scope_and_registry(self, scope: Optional[str]) -> Generator: + """Temporarily switch default scope to the target scope, and get the + corresponding registry. + + If the registry of the corresponding scope exists, yield the + registry, otherwise yield the current itself. + + Args: + scope (str, optional): The target scope. + + Examples: + >>> from mmengine.registry import Registry, DefaultScope, MODELS + >>> import time + >>> # External Registry + >>> MMDET_MODELS = Registry('mmdet_model', scope='mmdet', + >>> parent=MODELS) + >>> MMCLS_MODELS = Registry('mmcls_model', scope='mmcls', + >>> parent=MODELS) + >>> # Local Registry + >>> CUSTOM_MODELS = Registry('custom_model', scope='custom', + >>> parent=MODELS) + >>> + >>> # Initiate DefaultScope + >>> DefaultScope.get_instance(f'scope_{time.time()}', + >>> scope_name='custom') + >>> # Check default scope + >>> DefaultScope.get_current_instance().scope_name + custom + >>> # Switch to mmcls scope and get `MMCLS_MODELS` registry. + >>> with CUSTOM_MODELS.switch_scope_and_registry(scope='mmcls') as registry: + >>> DefaultScope.get_current_instance().scope_name + mmcls + >>> registry.scope + mmcls + >>> # Nested switch scope + >>> with CUSTOM_MODELS.switch_scope_and_registry(scope='mmdet') as mmdet_registry: + >>> DefaultScope.get_current_instance().scope_name + mmdet + >>> mmdet_registry.scope + mmdet + >>> with CUSTOM_MODELS.switch_scope_and_registry(scope='mmcls') as mmcls_registry: + >>> DefaultScope.get_current_instance().scope_name + mmcls + >>> mmcls_registry.scope + mmcls + >>> + >>> # Check switch back to original scope. + >>> DefaultScope.get_current_instance().scope_name + custom + """ # noqa: E501 + + # Switch to the given scope temporarily. If the corresponding registry + # can be found in root registry, return the registry under the scope, + # otherwise return the registry itself. + with DefaultScope.overwrite_default_scope(scope): + # Get the global default scope + default_scope = DefaultScope.get_current_instance() + # Get registry by scope + if default_scope is not None: + scope_name = default_scope.scope_name + try: + import_module(f'{scope_name}.registry') + except (ImportError, AttributeError, ModuleNotFoundError): + if scope in MODULE2PACKAGE: + print( + f'{scope} is not installed and its ' + 'modules will not be registered. If you ' + 'want to use modules defined in ' + f'{scope}, Please install {scope} by ' + f'`pip install {MODULE2PACKAGE[scope]}.') + else: + print( + f'Failed to import `{scope}.registry` ' + f'make sure the registry.py exists in `{scope}` ' + 'package.',) + root = self._get_root_registry() + registry = root._search_child(scope_name) + if registry is None: + # if `default_scope` can not be found, fallback to argument + # `registry` + print( + f'Failed to search registry with scope "{scope_name}" ' + f'in the "{root.name}" registry tree. 
' + f'As a workaround, the current "{self.name}" registry ' + f'in "{self.scope}" is used to build instance. This ' + 'may cause unexpected failure when running the built ' + f'modules. Please check whether "{scope_name}" is a ' + 'correct scope, or whether the registry is ' + 'initialized.',) + registry = self + # If there is no built default scope, just return current registry. + else: + registry = self + yield registry + + def _get_root_registry(self) -> 'Registry': + """Return the root registry.""" + root = self + while root.parent is not None: + root = root.parent + return root + + def import_from_location(self) -> None: + """Import modules from the pre-defined locations in self._location.""" + if not self._imported: + # avoid BC breaking + if len(self._locations) == 0 and self.scope in MODULE2PACKAGE: + print( + f'The "{self.name}" registry in {self.scope} did not ' + 'set import location. Fallback to call ' + f'`{self.scope}.utils.register_all_modules` ' + 'instead.',) + try: + module = import_module(f'{self.scope}.utils') + except (ImportError, AttributeError, ModuleNotFoundError): + if self.scope in MODULE2PACKAGE: + print( + f'{self.scope} is not installed and its ' + 'modules will not be registered. If you ' + 'want to use modules defined in ' + f'{self.scope}, Please install {self.scope} by ' + f'`pip install {MODULE2PACKAGE[self.scope]}.',) + else: + print( + f'Failed to import {self.scope} and register ' + 'its modules, please make sure you ' + 'have registered the module manually.',) + else: + # The import errors triggered during the registration + # may be more complex, here just throwing + # the error to avoid causing more implicit registry errors + # like `xxx`` not found in `yyy` registry. + module.register_all_modules(False) # type: ignore + + for loc in self._locations: + import_module(loc) + print( + f"Modules of {self.scope}'s {self.name} registry have " + f'been automatically imported from {loc}',) + self._imported = True + + def get(self, key: str) -> Optional[Type]: + """Get the registry record. + + If `key`` represents the whole object name with its module + information, for example, `mmengine.model.BaseModel`, ``get`` + will directly return the class object :class:`BaseModel`. + + Otherwise, it will first parse ``key`` and check whether it + contains a scope name. The logic to search for ``key``: + + - ``key`` does not contain a scope name, i.e., it is purely a module + name like "ResNet": :meth:`get` will search for ``ResNet`` from the + current registry to its parent or ancestors until finding it. + + - ``key`` contains a scope name and it is equal to the scope of the + current registry (e.g., "mmcls"), e.g., "mmcls.ResNet": :meth:`get` + will only search for ``ResNet`` in the current registry. + + - ``key`` contains a scope name and it is not equal to the scope of + the current registry (e.g., "mmdet"), e.g., "mmcls.FCNet": If the + scope exists in its children, :meth:`get` will get "FCNet" from + them. If not, :meth:`get` will first get the root registry and root + registry call its own :meth:`get` method. + + Args: + key (str): Name of the registered item, e.g., the class name in + string format. + + Returns: + Type or None: Return the corresponding class if ``key`` exists, + otherwise return None. 
+ + Examples: + >>> # define a registry + >>> MODELS = Registry('models') + >>> # register `ResNet` to `MODELS` + >>> @MODELS.register_module() + >>> class ResNet: + >>> pass + >>> resnet_cls = MODELS.get('ResNet') + + >>> # hierarchical registry + >>> DETECTORS = Registry('detector', parent=MODELS, scope='det') + >>> # `ResNet` does not exist in `DETECTORS` but `get` method + >>> # will try to search from its parents or ancestors + >>> resnet_cls = DETECTORS.get('ResNet') + >>> CLASSIFIER = Registry('classifier', parent=MODELS, scope='cls') + >>> @CLASSIFIER.register_module() + >>> class MobileNet: + >>> pass + >>> # `get` from its sibling registries + >>> mobilenet_cls = DETECTORS.get('cls.MobileNet') + """ + + if not isinstance(key, str): + raise TypeError( + 'The key argument of `Registry.get` must be a str, ' + f'got {type(key)}') + + scope, real_key = self.split_scope_key(key) + obj_cls = None + registry_name = self.name + scope_name = self.scope + + # lazy import the modules to register them into the registry + self.import_from_location() + + if scope is None or scope == self._scope: + # get from self + if real_key in self._module_dict: + obj_cls = self._module_dict[real_key] + elif scope is None: + # try to get the target from its parent or ancestors + parent = self.parent + while parent is not None: + if real_key in parent._module_dict: + obj_cls = parent._module_dict[real_key] + registry_name = parent.name + scope_name = parent.scope + break + parent = parent.parent + else: + # import the registry to add the nodes into the registry tree + try: + import_module(f'{scope}.registry') + print( + f'Registry node of {scope} has been automatically ' + 'imported.',) + except (ImportError, AttributeError, ModuleNotFoundError): + print( + f'Cannot auto import {scope}.registry, please check ' + f'whether the package "{scope}" is installed correctly ' + 'or import the registry manually.',) + # get from self._children + if scope in self._children: + obj_cls = self._children[scope].get(real_key) + registry_name = self._children[scope].name + scope_name = scope + else: + root = self._get_root_registry() + + if scope != root._scope and scope not in root._children: + # If not skip directly, `root.get(key)` will recursively + # call itself until RecursionError is thrown. + pass + else: + obj_cls = root.get(key) + + if obj_cls is None: + # Actually, it's strange to implement this `try ... except` to + # get the object by its name in `Registry.get`. However, If we + # want to build the model using a configuration like + # `dict(type='mmengine.model.BaseModel')`, which can + # be dumped by lazy import config, we need this code snippet + # for `Registry.get` to work. + try: + obj_cls = get_object_from_string(key) + except Exception: + raise RuntimeError(f'Failed to get {key}') + + if obj_cls is not None: + # For some rare cases (e.g. obj_cls is a partial function), obj_cls + # doesn't have `__name__`. Use default value to prevent error + cls_name = getattr(obj_cls, '__name__', str(obj_cls)) + return obj_cls + + def _search_child(self, scope: str) -> Optional['Registry']: + """Depth-first search for the corresponding registry in its children. + + Note that the method only search for the corresponding registry from + the current registry. Therefore, if we want to search from the root + registry, :meth:`_get_root_registry` should be called to get the + root registry first. + + Args: + scope (str): The scope name used for searching for its + corresponding registry. 
+ + Returns: + Registry or None: Return the corresponding registry if ``scope`` + exists, otherwise return None. + """ + if self._scope == scope: + return self + + for child in self._children.values(): + registry = child._search_child(scope) + if registry is not None: + return registry + + return None + + def build(self, cfg: dict, *args, **kwargs) -> Any: + """Build an instance. + + Build an instance by calling :attr:`build_func`. + + Args: + cfg (dict): Config dict needs to be built. + + Returns: + Any: The constructed object. + + Examples: + >>> from mmengine import Registry + >>> MODELS = Registry('models') + >>> @MODELS.register_module() + >>> class ResNet: + >>> def __init__(self, depth, stages=4): + >>> self.depth = depth + >>> self.stages = stages + >>> cfg = dict(type='ResNet', depth=50) + >>> model = MODELS.build(cfg) + """ + return self.build_func(cfg, *args, **kwargs, registry=self) + + def _add_child(self, registry: 'Registry') -> None: + """Add a child for a registry. + + Args: + registry (:obj:`Registry`): The ``registry`` will be added as a + child of the ``self``. + """ + + assert isinstance(registry, Registry) + assert registry.scope is not None + assert registry.scope not in self.children, \ + f'scope {registry.scope} exists in {self.name} registry' + self.children[registry.scope] = registry + + def _register_module(self, + module: Type, + module_name: Optional[Union[str, List[str]]] = None, + force: bool = False) -> None: + """Register a module. + + Args: + module (type): Module to be registered. Typically a class or a + function, but generally all ``Callable`` are acceptable. + module_name (str or list of str, optional): The module name to be + registered. If not specified, the class name will be used. + Defaults to None. + force (bool): Whether to override an existing class with the same + name. Defaults to False. + """ + if not callable(module): + raise TypeError(f'module must be Callable, but got {type(module)}') + + if module_name is None: + module_name = module.__name__ + if isinstance(module_name, str): + module_name = [module_name] + for name in module_name: + if not force and name in self._module_dict: + existed_module = self.module_dict[name] + raise KeyError(f'{name} is already registered in {self.name} ' + f'at {existed_module.__module__}') + self._module_dict[name] = module + + def register_module( + self, + name: Optional[Union[str, List[str]]] = None, + force: bool = False, + module: Optional[Type] = None) -> Union[type, Callable]: + """Register a module. + + A record will be added to ``self._module_dict``, whose key is the class + name or the specified name, and value is the class itself. + It can be used as a decorator or a normal function. + + Args: + name (str or list of str, optional): The module name to be + registered. If not specified, the class name will be used. + force (bool): Whether to override an existing class with the same + name. Defaults to False. + module (type, optional): Module class or function to be registered. + Defaults to None. 
+ + Examples: + >>> backbones = Registry('backbone') + >>> # as a decorator + >>> @backbones.register_module() + >>> class ResNet: + >>> pass + >>> backbones = Registry('backbone') + >>> @backbones.register_module(name='mnet') + >>> class MobileNet: + >>> pass + + >>> # as a normal function + >>> class ResNet: + >>> pass + >>> backbones.register_module(module=ResNet) + """ + if not isinstance(force, bool): + raise TypeError(f'force must be a boolean, but got {type(force)}') + + # raise the error ahead of time + if not (name is None or isinstance(name, str) or is_seq_of(name, str)): + raise TypeError( + 'name must be None, an instance of str, or a sequence of str, ' + f'but got {type(name)}') + + # use it as a normal method: x.register_module(module=SomeClass) + if module is not None: + self._register_module(module=module, module_name=name, force=force) + return module + + # use it as a decorator: @x.register_module() + def _register(module): + self._register_module(module=module, module_name=name, force=force) + return module + + return _register + + +def is_seq_of(seq: Any, + expected_type: Union[Type, tuple], + seq_type: Optional[Type] = None) -> bool: + """Check whether it is a sequence of some type. + + Args: + seq (Sequence): The sequence to be checked. + expected_type (type or tuple): Expected type of sequence items. + seq_type (type, optional): Expected sequence type. Defaults to None. + + Returns: + bool: Return True if ``seq`` is valid else False. + + Examples: + >>> from mmengine.utils import is_seq_of + >>> seq = ['a', 'b', 'c'] + >>> is_seq_of(seq, str) + True + >>> is_seq_of(seq, int) + False + """ + if seq_type is None: + exp_seq_type = abc.Sequence + else: + assert isinstance(seq_type, type) + exp_seq_type = seq_type + if not isinstance(seq, exp_seq_type): + return False + for item in seq: + if not isinstance(item, expected_type): + return False + return True + + +def get_object_from_string(obj_name: str): + """Get object from name. + + Args: + obj_name (str): The name of the object. + + Examples: + >>> get_object_from_string('torch.optim.sgd.SGD') + >>> torch.optim.sgd.SGD + """ + parts = iter(obj_name.split('.')) + module_name = next(parts) + # import module + while True: + try: + module = import_module(module_name) + part = next(parts) + # mmcv.ops has nms.py and nms function at the same time. So the + # function will have a higher priority + obj = getattr(module, part, None) + if obj is not None and not ismodule(obj): + break + module_name = f'{module_name}.{part}' + except StopIteration: + # if obj is a module + return module + except ImportError: + return None + + # get class or attribute from module + obj = module + while True: + try: + obj = getattr(obj, part) + part = next(parts) + except StopIteration: + return obj + except AttributeError: + return None + +def ismodule(object): + """Return true if the object is a module. 
+ + Module objects provide these attributes: + __cached__ pathname to byte compiled file + __doc__ documentation string + __file__ filename (missing for built-in modules)""" + return isinstance(object, types.ModuleType) \ No newline at end of file diff --git a/segformer_plusplus/__init__.py b/segformer_plusplus/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cd8a768648cca72d7cc393e7eb85b79b434d9fc7 --- /dev/null +++ b/segformer_plusplus/__init__.py @@ -0,0 +1,4 @@ +from .build_model import create_model, create_custom_model +from .random_benchmark import random_benchmark + +__all__ = ['create_model', 'create_custom_model', 'random_benchmark'] diff --git a/segformer_plusplus/build_model.py b/segformer_plusplus/build_model.py new file mode 100644 index 0000000000000000000000000000000000000000..250786c42e70943461d6cb6e2ce4c09c6984e652 --- /dev/null +++ b/segformer_plusplus/build_model.py @@ -0,0 +1,107 @@ +import os + +from .utils import MODELS, imagenet_weights +from .utils import tome_presets +from .model.base_module import BaseModule +from .configs.config.config import Config +from .utils.build_functions import build_model_from_cfg + + +class SegFormer(BaseModule): + """ + This class represents a SegFormer model that allows for the application of token merging. + + Attributes: + backbone (BaseModule): MixVisionTransformer backbone + decode_head (BaseModule): SegFormer head + + """ + def __init__(self, cfg): + """ + Initialize the SegFormer model. + + Args: + cfg (Config): an mmengine Config object, which defines the backbone, head and token merging strategy used. + + """ + super().__init__() + self.backbone = build_model_from_cfg(cfg.backbone, registry=MODELS) + self.decode_head = build_model_from_cfg(cfg.decode_head, registry=MODELS) + + def forward(self, x): + """ + Forward pass of the model. + + Args: + x (torch.Tensor): input tensor of shape [B, C, H, W] + + Returns: + torch.Tensor: output tensor + + """ + x = self.backbone(x) + x = self.decode_head(x) + return x + + +def create_model( + backbone: str = 'b0', + tome_strategy: str = None, + out_channels: int = 19, + pretrained: bool = False, +): + """ + Create a SegFormer model using the predefined SegFormer backbones from the MiT series (b0-b5). + + Args: + backbone (str): backbone name (e.g. 'b0') + tome_strategy (str | list(dict)): select strategy from presets ('bsm_hq', 'bsm_fast', 'n2d_2x2') or define a + custom strategy using a list, that contains of dictionaries, in which the strategies for the stage are + defined + out_channels (int): number of output channels (e.g. 19 for the cityscapes semantic segmentation task) + pretrained: use pretrained (imagenet) weights + + Returns: + BaseModule: SegFormer model + + """ + backbone = backbone.lower() + assert backbone in [f'b{i}' for i in range(6)] + + wd = os.path.dirname(os.path.abspath(__file__)) + + cfg = Config.fromfile(os.path.join(wd, 'configs', f'segformer_mit_{backbone}.py')) + + cfg.decode_head.out_channels = out_channels + + if tome_strategy is not None: + if tome_strategy not in list(tome_presets.keys()): + print("Using custom merging strategy.") + cfg.backbone.tome_cfg = tome_presets[tome_strategy] + + # load imagenet weights + if pretrained: + cfg.backbone.init_cfg = dict(type='Pretrained', checkpoint=imagenet_weights[backbone]) + + return SegFormer(cfg) + + +def create_custom_model( + model_cfg: Config, + tome_strategy: list[dict] = None, +): + """ + Create a SegFormer model with customizable backbone and head. 
+ + Args: + model_cfg (Config): backbone name (e.g. 'b0') + tome_strategy (list(dict)): custom token merging strategy + + Returns: + BaseModule: SegFormer model + + """ + if tome_strategy is not None: + model_cfg.backbone.tome_cfg = tome_strategy + + return SegFormer(model_cfg) diff --git a/segformer_plusplus/cityscape_benchmark.py b/segformer_plusplus/cityscape_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..2cbd882156476e05b3314a9fa513f7939b3589e6 --- /dev/null +++ b/segformer_plusplus/cityscape_benchmark.py @@ -0,0 +1,117 @@ +import torch +from PIL import Image +import torchvision.transforms as T +import os +from typing import Union, List, Tuple +import numpy as np + +from .utils.benchmark import benchmark + + +# Gerät auswählen +device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') +print(f"Using device: {device}") +if device.type == 'cuda': + print(f"CUDA Device Name: {torch.cuda.get_device_name(torch.cuda.current_device())}") + + +def cityscape_benchmark( + model: torch.nn.Module, + image_path: str, + batch_size: Union[int, List[int]] = 1, + image_size: Union[Tuple[int], List[Tuple[int]]] = (3, 1024, 1024), + save_output: bool = True, + +): + """ + Calculate the FPS of a given model using an actual Cityscapes image. + + Args: + model: instance of a model (e.g. SegFormer) + image_path: the path to the Cityscapes image + batch_size: the batch size(s) at which to calculate the FPS (e.g. 1 or [1, 2, 4]) + image_size: the size of the images to use (e.g. (3, 1024, 1024)) + save_output: whether to save the output prediction (default True) + + Returns: + the FPS values calculated for all image sizes and batch sizes in the form of a dictionary + """ + + + if isinstance(batch_size, int): + batch_size = [batch_size] + if isinstance(image_size, tuple): + image_size = [image_size] + + values = {} + throughput_values = [] + + model = model.to(device) + model.eval() + + assert os.path.exists(image_path), f"Image not found: {image_path}" + image = Image.open(image_path).convert("RGB") + + img_tensor = T.ToTensor()(image) + mean = img_tensor.mean(dim=(1, 2)) + std = img_tensor.std(dim=(1, 2)) + print(f"Calculated Mean: {mean}") + print(f"Calculated Std: {std}") + + transform = T.Compose([ + T.Resize((image_size[0][1], image_size[0][2])), + T.ToTensor(), + T.Normalize(mean=mean.tolist(), + std=std.tolist()) + ]) + + img_tensor = transform(image).unsqueeze(0).to(device) + + for i in image_size: + # fill with fps for each batch size + fps = [] + for b in batch_size: + for _ in range(4): + # Baseline benchmark + if i[1] >= 1024: + r = 16 + else: + r = 32 + baseline_throughput = benchmark( + model.to(device), + device=device, + verbose=True, + runs=r, + batch_size=b, + input_size=i + ) + throughput_values.append(baseline_throughput) + throughput_values = np.asarray(throughput_values) + throughput = np.around(np.mean(throughput_values), decimals=2) + print('Im_size:', i, 'Batch_size:', b, 'Mean:', throughput, 'Std:', + np.around(np.std(throughput_values), decimals=2)) + throughput_values = [] + fps.append({b: throughput}) + values[i] = fps + + if save_output: + with torch.no_grad(): + with open("model_output_log.txt", "w") as f: + f.write("=== Model Input Info ===\n") + f.write(f"Input tensor:\n{img_tensor}\n") + f.write(f"Input shape: {img_tensor.shape}\n") + f.write(f"Input stats: mean = {img_tensor.mean().item()}, std = {img_tensor.std().item()}\n\n") + + output = model(img_tensor) + + f.write("=== Raw Model Output ===\n") + f.write(f"{output}\n\n") + + 
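+                # Collapse the raw logits to per-pixel class indices: argmax over
+                # the class dimension (dim=1), drop the batch dimension and move
+                # the result to the CPU so it can be written out with np.savetxt.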
pred = torch.argmax(output, dim=1).squeeze(0).cpu().numpy() + + # Speichere Prediction als Text ab + np.savetxt("cityscapes_prediction_output.txt", pred, fmt="%d") + + print("Prediction saved as cityscapes_prediction_output.txt") + + return values diff --git a/segformer_plusplus/configs/__init__.py b/segformer_plusplus/configs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b680692d5ff945d85311a8c90c70766275444498 --- /dev/null +++ b/segformer_plusplus/configs/__init__.py @@ -0,0 +1 @@ +__all__ = [] \ No newline at end of file diff --git a/segformer_plusplus/configs/config/config.py b/segformer_plusplus/configs/config/config.py new file mode 100644 index 0000000000000000000000000000000000000000..1b68b848b0d95a861f5c0b351768d6b9abea76b2 --- /dev/null +++ b/segformer_plusplus/configs/config/config.py @@ -0,0 +1,1545 @@ +import ast +import copy +import os +import os.path as osp +import platform +import shutil +import sys +import tempfile +import types +import uuid +import re +import warnings +from argparse import ArgumentParser +from collections import OrderedDict, abc +from pathlib import Path +from typing import Any, Optional, Tuple, Union +from omegaconf import OmegaConf +import yapf +from addict import Dict +from yapf.yapflib.yapf_api import FormatCode + +from .lazy import LazyAttr, LazyObject +from .utils import (check_file_exist, get_installed_path, import_modules_from_strings, is_installed, RemoveAssignFromAST, + ImportTransformer, _gather_abs_import_lazyobj, _get_external_cfg_base_path, + _get_external_cfg_path, _get_package_and_cfg_path, _is_builtin_module, dump) + + +BASE_KEY = '_base_' +DELETE_KEY = '_delete_' +DEPRECATION_KEY = '_deprecation_' +RESERVED_KEYS = ['filename', 'text', 'pretty_text', 'env_variables'] + + +def _lazy2string(cfg_dict, dict_type=None): + if isinstance(cfg_dict, dict): + dict_type = dict_type or type(cfg_dict) + return dict_type( + {k: _lazy2string(v, dict_type) + for k, v in dict.items(cfg_dict)}) + elif isinstance(cfg_dict, (tuple, list)): + return type(cfg_dict)(_lazy2string(v, dict_type) for v in cfg_dict) + elif isinstance(cfg_dict, (LazyAttr, LazyObject)): + return f'{cfg_dict.module}.{str(cfg_dict)}' + else: + return cfg_dict + + +class ConfigDict(Dict): + """A dictionary for config which has the same interface as python's built- + in dictionary and can be used as a normal dictionary. + + The Config class would transform the nested fields (dictionary-like fields) + in config file into ``ConfigDict``. + + If the class attribute ``lazy`` is ``False``, users will get the + object built by ``LazyObject`` or ``LazyAttr``, otherwise users will get + the ``LazyObject`` or ``LazyAttr`` itself. + + The ``lazy`` should be set to ``True`` to avoid building the imported + object during configuration parsing, and it should be set to False outside + the Config to ensure that users do not experience the ``LazyObject``. + """ + lazy = False + + def __init__(__self, *args, **kwargs): + object.__setattr__(__self, '__parent', kwargs.pop('__parent', None)) + object.__setattr__(__self, '__key', kwargs.pop('__key', None)) + object.__setattr__(__self, '__frozen', False) + for arg in args: + if not arg: + continue + # Since ConfigDict.items will convert LazyObject to real object + # automatically, we need to call super().items() to make sure + # the LazyObject will not be converted. 
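+            # dict.items(arg) reads the plain dict storage directly, so any
+            # LazyObject/LazyAttr values are copied as-is instead of being built.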
+ if isinstance(arg, ConfigDict): + for key, val in dict.items(arg): + __self[key] = __self._hook(val) + elif isinstance(arg, dict): + for key, val in arg.items(): + __self[key] = __self._hook(val) + elif isinstance(arg, tuple) and (not isinstance(arg[0], tuple)): + __self[arg[0]] = __self._hook(arg[1]) + else: + for key, val in iter(arg): + __self[key] = __self._hook(val) + + for key, val in dict.items(kwargs): + __self[key] = __self._hook(val) + + def __missing__(self, name): + raise KeyError(name) + + def __getattr__(self, name): + try: + value = super().__getattr__(name) + if isinstance(value, (LazyAttr, LazyObject)) and not self.lazy: + value = value.build() + except KeyError: + raise AttributeError(f"'{self.__class__.__name__}' object has no " + f"attribute '{name}'") + except Exception as e: + raise e + else: + return value + + @classmethod + def _hook(cls, item): + # avoid to convert user defined dict to ConfigDict. + if type(item) in (dict, OrderedDict): + return cls(item) + elif isinstance(item, (list, tuple)): + return type(item)(cls._hook(elem) for elem in item) + return item + + def __setattr__(self, name, value): + value = self._hook(value) + return super().__setattr__(name, value) + + def __setitem__(self, name, value): + value = self._hook(value) + return super().__setitem__(name, value) + + def __getitem__(self, key): + return self.build_lazy(super().__getitem__(key)) + + def __deepcopy__(self, memo): + other = self.__class__() + memo[id(self)] = other + for key, value in super().items(): + other[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo) + return other + + def __copy__(self): + other = self.__class__() + for key, value in super().items(): + other[key] = value + return other + + copy = __copy__ + + def __iter__(self): + # Implement `__iter__` to overwrite the unpacking operator `**cfg_dict` + # to get the built lazy object + return iter(self.keys()) + + def get(self, key: str, default: Optional[Any] = None) -> Any: + """Get the value of the key. If class attribute ``lazy`` is True, the + LazyObject will be built and returned. + + Args: + key (str): The key. + default (any, optional): The default value. Defaults to None. + + Returns: + Any: The value of the key. + """ + return self.build_lazy(super().get(key, default)) + + def pop(self, key, default=None): + """Pop the value of the key. If class attribute ``lazy`` is True, the + LazyObject will be built and returned. + + Args: + key (str): The key. + default (any, optional): The default value. Defaults to None. + + Returns: + Any: The value of the key. + """ + return self.build_lazy(super().pop(key, default)) + + def update(self, *args, **kwargs) -> None: + """Override this method to make sure the LazyObject will not be built + during updating.""" + other = {} + if args: + if len(args) > 1: + raise TypeError('update only accept one positional argument') + # Avoid to used self.items to build LazyObject + for key, value in dict.items(args[0]): + other[key] = value + + for key, value in dict(kwargs).items(): + other[key] = value + for k, v in other.items(): + if ((k not in self) or (not isinstance(self[k], dict)) + or (not isinstance(v, dict))): + self[k] = self._hook(v) + else: + self[k].update(v) + + def build_lazy(self, value: Any) -> Any: + """If class attribute ``lazy`` is False, the LazyObject will be built + and returned. + + Args: + value (Any): The value to be built. + + Returns: + Any: The built value. 
+ """ + if isinstance(value, (LazyAttr, LazyObject)) and not self.lazy: + value = value.build() + return value + + def values(self): + """Yield the values of the dictionary. + + If class attribute ``lazy`` is False, the value of ``LazyObject`` or + ``LazyAttr`` will be built and returned. + """ + values = [] + for value in super().values(): + values.append(self.build_lazy(value)) + return values + + def items(self): + """Yield the keys and values of the dictionary. + + If class attribute ``lazy`` is False, the value of ``LazyObject`` or + ``LazyAttr`` will be built and returned. + """ + items = [] + for key, value in super().items(): + items.append((key, self.build_lazy(value))) + return items + + def merge(self, other: dict): + """Merge another dictionary into current dictionary. + + Args: + other (dict): Another dictionary. + """ + default = object() + + def _merge_a_into_b(a, b): + if isinstance(a, dict): + if not isinstance(b, dict): + a.pop(DELETE_KEY, None) + return a + if a.pop(DELETE_KEY, False): + b.clear() + all_keys = list(b.keys()) + list(a.keys()) + return { + key: + _merge_a_into_b(a.get(key, default), b.get(key, default)) + for key in all_keys if key != DELETE_KEY + } + else: + return a if a is not default else b + + merged = _merge_a_into_b(copy.deepcopy(other), copy.deepcopy(self)) + self.clear() + for key, value in merged.items(): + self[key] = value + + def __reduce_ex__(self): + # Override __reduce_ex__ to avoid `self.items` will be + # called by CPython interpreter during pickling. See more details in + # https://github.com/python/cpython/blob/8d61a71f9c81619e34d4a30b625922ebc83c561b/Objects/typeobject.c#L6196 # noqa: E501 + from ...utils import digit_version + if digit_version(platform.python_version()) < digit_version('3.8'): + return (self.__class__, ({k: v + for k, v in super().items()}, ), None, + None, None) + else: + return (self.__class__, ({k: v + for k, v in super().items()}, ), None, + None, None, None) + + def __eq__(self, other): + if isinstance(other, ConfigDict): + return other.to_dict() == self.to_dict() + elif isinstance(other, dict): + return {k: v for k, v in self.items()} == other + else: + return False + + def _to_lazy_dict(self): + """Convert the ConfigDict to a normal dictionary recursively, and keep + the ``LazyObject`` or ``LazyAttr`` object not built.""" + + def _to_dict(data): + if isinstance(data, ConfigDict): + return { + key: _to_dict(value) + for key, value in Dict.items(data) + } + elif isinstance(data, dict): + return {key: _to_dict(value) for key, value in data.items()} + elif isinstance(data, (list, tuple)): + return type(data)(_to_dict(item) for item in data) + else: + return data + + return _to_dict(self) + + def to_dict(self): + """Convert the ConfigDict to a normal dictionary recursively, and + convert the ``LazyObject`` or ``LazyAttr`` to string.""" + return _lazy2string(self, dict_type=dict) + + +def add_args(parser: ArgumentParser, + cfg: dict, + prefix: str = '') -> ArgumentParser: + """Add config fields into argument parser. + + Args: + parser (ArgumentParser): Argument parser. + cfg (dict): Config dictionary. + prefix (str, optional): Prefix of parser argument. + Defaults to ''. + + Returns: + ArgumentParser: Argument parser containing config fields. 
+ """ + for k, v in cfg.items(): + if isinstance(v, str): + parser.add_argument('--' + prefix + k) + elif isinstance(v, bool): + parser.add_argument('--' + prefix + k, action='store_true') + elif isinstance(v, int): + parser.add_argument('--' + prefix + k, type=int) + elif isinstance(v, float): + parser.add_argument('--' + prefix + k, type=float) + elif isinstance(v, dict): + add_args(parser, v, prefix + k + '.') + elif isinstance(v, abc.Iterable): + parser.add_argument( + '--' + prefix + k, type=type(next(iter(v))), nargs='+') + return parser + + +class Config: + """A facility for config and config files. + + It supports common file formats as configs: python/json/yaml. + ``Config.fromfile`` can parse a dictionary from a config file, then + build a ``Config`` instance with the dictionary. + The interface is the same as a dict object and also allows access config + values as attributes. + + Args: + cfg_dict (dict, optional): A config dictionary. Defaults to None. + cfg_text (str, optional): Text of config. Defaults to None. + filename (str or Path, optional): Name of config file. + Defaults to None. + format_python_code (bool): Whether to format Python code by yapf. + Defaults to True. + """ # noqa: E501 + + def __init__( + self, + cfg_dict: Optional[dict] = None, + cfg_text: Optional[str] = None, + filename: Optional[Union[str, Path]] = None, + env_variables: Optional[dict] = None, + format_python_code: bool = True, + ): + filename = str(filename) if isinstance(filename, Path) else filename + if cfg_dict is None: + cfg_dict = dict() + elif not isinstance(cfg_dict, dict): + raise TypeError('cfg_dict must be a dict, but ' + f'got {type(cfg_dict)}') + for key in cfg_dict: + if key in RESERVED_KEYS: + raise KeyError(f'{key} is reserved for config file') + + if not isinstance(cfg_dict, ConfigDict): + cfg_dict = ConfigDict(cfg_dict) + super().__setattr__('_cfg_dict', cfg_dict) + super().__setattr__('_filename', filename) + super().__setattr__('_format_python_code', format_python_code) + if not hasattr(self, '_imported_names'): + super().__setattr__('_imported_names', set()) + + if cfg_text: + text = cfg_text + elif filename: + with open(filename, encoding='utf-8') as f: + text = f.read() + else: + text = '' + super().__setattr__('_text', text) + if env_variables is None: + env_variables = dict() + super().__setattr__('_env_variables', env_variables) + + @staticmethod + def fromfile(filename: Union[str, Path], + use_predefined_variables: bool = True, + import_custom_modules: bool = True, + use_environment_variables: bool = True, + lazy_import: Optional[bool] = None, + format_python_code: bool = True) -> 'Config': + """Build a Config instance from config file. + + Args: + filename (str or Path): Name of config file. + use_predefined_variables (bool, optional): Whether to use + predefined variables. Defaults to True. + import_custom_modules (bool, optional): Whether to support + importing custom modules in config. Defaults to None. + use_environment_variables (bool, optional): Whether to use + environment variables. Defaults to True. + lazy_import (bool): Whether to load config in `lazy_import` mode. + If it is `None`, it will be deduced by the content of the + config file. Defaults to None. + format_python_code (bool): Whether to format Python code by yapf. + Defaults to True. + + Returns: + Config: Config instance built from config file. 
+ """ + filename = str(filename) if isinstance(filename, Path) else filename + if lazy_import is False or \ + lazy_import is None and not Config._is_lazy_import(filename): + cfg_dict, cfg_text, env_variables = Config._file2dict( + filename, use_predefined_variables, use_environment_variables, + lazy_import) + if import_custom_modules and cfg_dict.get('custom_imports', None): + try: + import_modules_from_strings(**cfg_dict['custom_imports']) + except ImportError as e: + err_msg = ( + 'Failed to import custom modules from ' + f"{cfg_dict['custom_imports']}, the current sys.path " + 'is: ') + for p in sys.path: + err_msg += f'\n {p}' + err_msg += ( + '\nYou should set `PYTHONPATH` to make `sys.path` ' + 'include the directory which contains your custom ' + 'module') + raise ImportError(err_msg) from e + return Config( + cfg_dict, + cfg_text=cfg_text, + filename=filename, + env_variables=env_variables, + ) + else: + # Enable lazy import when parsing the config. + # Using try-except to make sure ``ConfigDict.lazy`` will be reset + # to False. See more details about lazy in the docstring of + # ConfigDict + ConfigDict.lazy = True + try: + cfg_dict, imported_names = Config._parse_lazy_import(filename) + except Exception as e: + raise e + finally: + # disable lazy import to get the real type. See more details + # about lazy in the docstring of ConfigDict + ConfigDict.lazy = False + + cfg = Config( + cfg_dict, + filename=filename, + format_python_code=format_python_code) + object.__setattr__(cfg, '_imported_names', imported_names) + return cfg + + @staticmethod + def _get_base_modules(nodes: list) -> list: + """Get base module name from parsed code. + + Args: + nodes (list): Parsed code of the config file. + + Returns: + list: Name of base modules. + """ + + def _get_base_module_from_with(with_nodes: list) -> list: + """Get base module name from if statement in python file. + + Args: + with_nodes (list): List of if statement. + + Returns: + list: Name of base modules. + """ + base_modules = [] + for node in with_nodes: + assert isinstance(node, ast.ImportFrom), ( + 'Illegal syntax in config file! Only ' + '`from ... import ...` could be implemented` in ' + 'with read_base()`') + assert node.module is not None, ( + 'Illegal syntax in config file! Syntax like ' + '`from . import xxx` is not allowed in `with read_base()`') + base_modules.append(node.level * '.' + node.module) + return base_modules + + for idx, node in enumerate(nodes): + if (isinstance(node, ast.Assign) + and isinstance(node.targets[0], ast.Name) + and node.targets[0].id == BASE_KEY): + raise SyntaxError( + 'The configuration file type in the inheritance chain ' + 'must match the current configuration file type, either ' + '"lazy_import" or non-"lazy_import". You got this error ' + f'since you use the syntax like `_base_ = "{node.targets[0].id}"` ' # noqa: E501 + 'in your config. You should use `with read_base(): ... to` ' # noqa: E501 + 'mark the inherited config file. 
See more information '# noqa: E501 + ) + + if not isinstance(node, ast.With): + continue + + expr = node.items[0].context_expr + if (not isinstance(expr, ast.Call) + or not expr.func.id == 'read_base' or # type: ignore + len(node.items) > 1): + raise SyntaxError( + 'Only `read_base` context manager can be used in the ' + 'config') + for nested_idx, nested_node in enumerate(node.body): + nodes.insert(idx + nested_idx + 1, nested_node) + nodes.pop(idx) + return _get_base_module_from_with(node.body) + return [] + + @staticmethod + def _validate_py_syntax(filename: str): + """Validate syntax of python config. + + Args: + filename (str): Filename of python config file. + """ + with open(filename, encoding='utf-8') as f: + content = f.read() + try: + ast.parse(content) + except SyntaxError as e: + raise SyntaxError('There are syntax errors in config ' + f'file {filename}: {e}') + + @staticmethod + def _substitute_predefined_vars(filename: str, temp_config_name: str): + """Substitute predefined variables in config with actual values. + + Sometimes we want some variables in the config to be related to the + current path or file name, etc. + + Here is an example of a typical usage scenario. When training a model, + we define a working directory in the config that save the models and + logs. For different configs, we expect to define different working + directories. A common way for users is to use the config file name + directly as part of the working directory name, e.g. for the config + ``config_setting1.py``, the working directory is + ``. /work_dir/config_setting1``. + + This can be easily achieved using predefined variables, which can be + written in the config `config_setting1.py` as follows + + .. code-block:: python + + work_dir = '. /work_dir/{{ fileBasenameNoExtension }}' + + + Here `{{ fileBasenameNoExtension }}` indicates the file name of the + config (without the extension), and when the config class reads the + config file, it will automatically parse this double-bracketed string + to the corresponding actual value. + + .. code-block:: python + + cfg = Config.fromfile('. /config_setting1.py') + cfg.work_dir # ". /work_dir/config_setting1" + + + For details, Please refer to docs/zh_cn/advanced_tutorials/config.md . + + Args: + filename (str): Filename of config. + temp_config_name (str): Temporary filename to save substituted + config. + """ + file_dirname = osp.dirname(filename) + file_basename = osp.basename(filename) + file_basename_no_extension = osp.splitext(file_basename)[0] + file_extname = osp.splitext(filename)[1] + support_templates = dict( + fileDirname=file_dirname, + fileBasename=file_basename, + fileBasenameNoExtension=file_basename_no_extension, + fileExtname=file_extname) + with open(filename, encoding='utf-8') as f: + config_file = f.read() + for key, value in support_templates.items(): + regexp = r'\{\{\s*' + str(key) + r'\s*\}\}' + value = value.replace('\\', '/') + config_file = re.sub(regexp, value, config_file) + with open(temp_config_name, 'w', encoding='utf-8') as tmp_config_file: + tmp_config_file.write(config_file) + + @staticmethod + def _substitute_env_variables(filename: str, temp_config_name: str): + """Substitute environment variables in config with actual values. + + Sometimes, we want to change some items in the config with environment + variables. For examples, we expect to change dataset root by setting + ``DATASET_ROOT=/dataset/root/path`` in the command line. This can be + easily achieved by writing lines in the config as follows + + .. 
code-block:: python + + data_root = '{{$DATASET_ROOT:/default/dataset}}/images' + + + Here, ``{{$DATASET_ROOT:/default/dataset}}`` indicates using the + environment variable ``DATASET_ROOT`` to replace the part between + ``{{}}``. If the ``DATASET_ROOT`` is not set, the default value + ``/default/dataset`` will be used. + + Environment variables not only can replace items in the string, they + can also substitute other types of data in config. In this situation, + we can write the config as below + + .. code-block:: python + + model = dict( + bbox_head = dict(num_classes={{'$NUM_CLASSES:80'}})) + + + For details, Please refer to docs/zh_cn/tutorials/config.md . + + Args: + filename (str): Filename of config. + temp_config_name (str): Temporary filename to save substituted + config. + """ + with open(filename, encoding='utf-8') as f: + config_file = f.read() + regexp = r'\{\{[\'\"]?\s*\$(\w+)\s*\:\s*(\S*?)\s*[\'\"]?\}\}' + keys = re.findall(regexp, config_file) + env_variables = dict() + for var_name, value in keys: + regexp = r'\{\{[\'\"]?\s*\$' + var_name + r'\s*\:\s*' \ + + value + r'\s*[\'\"]?\}\}' + if var_name in os.environ: + value = os.environ[var_name] + env_variables[var_name] = value + if not value: + raise KeyError(f'`{var_name}` cannot be found in `os.environ`.' + f' Please set `{var_name}` in environment or ' + 'give a default value.') + config_file = re.sub(regexp, value, config_file) + + with open(temp_config_name, 'w', encoding='utf-8') as tmp_config_file: + tmp_config_file.write(config_file) + return env_variables + + @staticmethod + def _pre_substitute_base_vars(filename: str, + temp_config_name: str) -> dict: + """Preceding step for substituting variables in base config with actual + value. + + Args: + filename (str): Filename of config. + temp_config_name (str): Temporary filename to save substituted + config. + + Returns: + dict: A dictionary contains variables in base config. + """ + with open(filename, encoding='utf-8') as f: + config_file = f.read() + base_var_dict = {} + regexp = r'\{\{\s*' + BASE_KEY + r'\.([\w\.]+)\s*\}\}' + base_vars = set(re.findall(regexp, config_file)) + for base_var in base_vars: + randstr = f'_{base_var}_{uuid.uuid4().hex.lower()[:6]}' + base_var_dict[randstr] = base_var + regexp = r'\{\{\s*' + BASE_KEY + r'\.' + base_var + r'\s*\}\}' + config_file = re.sub(regexp, f'"{randstr}"', config_file) + with open(temp_config_name, 'w', encoding='utf-8') as tmp_config_file: + tmp_config_file.write(config_file) + return base_var_dict + + @staticmethod + def _substitute_base_vars(cfg: Any, base_var_dict: dict, + base_cfg: dict) -> Any: + """Substitute base variables from strings to their actual values. + + Args: + Any : Config dictionary. + base_var_dict (dict): A dictionary contains variables in base + config. + base_cfg (dict): Base config dictionary. + + Returns: + Any : A dictionary with origin base variables + substituted with actual values. 
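+
+        Examples:
+            >>> # Illustrative sketch: '_model.depth_ab12cd' stands in for a
+            >>> # placeholder generated by ``_pre_substitute_base_vars``.
+            >>> Config._substitute_base_vars(
+            ...     dict(depth='_model.depth_ab12cd'),
+            ...     {'_model.depth_ab12cd': 'model.depth'},
+            ...     {'model': {'depth': 50}})
+            {'depth': 50}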
+ """ + cfg = copy.deepcopy(cfg) + + if isinstance(cfg, dict): + for k, v in cfg.items(): + if isinstance(v, str) and v in base_var_dict: + new_v = base_cfg + for new_k in base_var_dict[v].split('.'): + new_v = new_v[new_k] + cfg[k] = new_v + elif isinstance(v, (list, tuple, dict)): + cfg[k] = Config._substitute_base_vars( + v, base_var_dict, base_cfg) + elif isinstance(cfg, tuple): + cfg = tuple( + Config._substitute_base_vars(c, base_var_dict, base_cfg) + for c in cfg) + elif isinstance(cfg, list): + cfg = [ + Config._substitute_base_vars(c, base_var_dict, base_cfg) + for c in cfg + ] + elif isinstance(cfg, str) and cfg in base_var_dict: + new_v = base_cfg + for new_k in base_var_dict[cfg].split('.'): + new_v = new_v[new_k] + cfg = new_v + + return cfg + + @staticmethod + def _file2dict( + filename: str, + use_predefined_variables: bool = True, + use_environment_variables: bool = True, + lazy_import: Optional[bool] = None) -> Tuple[dict, str, dict]: + """Transform file to variables dictionary. + + Args: + filename (str): Name of config file. + use_predefined_variables (bool, optional): Whether to use + predefined variables. Defaults to True. + use_environment_variables (bool, optional): Whether to use + environment variables. Defaults to True. + lazy_import (bool): Whether to load config in `lazy_import` mode. + If it is `None`, it will be deduced by the content of the + config file. Defaults to None. + + Returns: + Tuple[dict, str]: Variables dictionary and text of Config. + """ + if lazy_import is None and Config._is_lazy_import(filename): + raise RuntimeError( + 'The configuration file type in the inheritance chain ' + 'must match the current configuration file type, either ' + '"lazy_import" or non-"lazy_import". You got this error ' + 'since you use the syntax like `with read_base(): ...` ' + f'or import non-builtin module in {filename}.' 
# noqa: E501 + ) + + filename = osp.abspath(osp.expanduser(filename)) + check_file_exist(filename) + fileExtname = osp.splitext(filename)[1] + if fileExtname not in ['.py', '.json', '.yaml', '.yml']: + raise OSError('Only py/yml/yaml/json type are supported now!') + try: + with tempfile.TemporaryDirectory() as temp_config_dir: + temp_config_file = tempfile.NamedTemporaryFile( + dir=temp_config_dir, suffix=fileExtname, delete=False) + if platform.system() == 'Windows': + temp_config_file.close() + + # Substitute predefined variables + if use_predefined_variables: + Config._substitute_predefined_vars(filename, + temp_config_file.name) + else: + shutil.copyfile(filename, temp_config_file.name) + # Substitute environment variables + env_variables = dict() + if use_environment_variables: + env_variables = Config._substitute_env_variables( + temp_config_file.name, temp_config_file.name) + # Substitute base variables from placeholders to strings + base_var_dict = Config._pre_substitute_base_vars( + temp_config_file.name, temp_config_file.name) + + # Handle base files + base_cfg_dict = ConfigDict() + cfg_text_list = list() + for base_cfg_path in Config._get_base_files( + temp_config_file.name): + base_cfg_path, scope = Config._get_cfg_path( + base_cfg_path, filename) + _cfg_dict, _cfg_text, _env_variables = Config._file2dict( + filename=base_cfg_path, + use_predefined_variables=use_predefined_variables, + use_environment_variables=use_environment_variables, + lazy_import=lazy_import, + ) + cfg_text_list.append(_cfg_text) + env_variables.update(_env_variables) + duplicate_keys = base_cfg_dict.keys() & _cfg_dict.keys() + if len(duplicate_keys) > 0: + raise KeyError( + 'Duplicate key is not allowed among bases. ' + f'Duplicate keys: {duplicate_keys}') + + # _dict_to_config_dict will do the following things: + # 1. Recursively converts ``dict`` to :obj:`ConfigDict`. + # 2. Set `_scope_` for the outer dict variable for the base + # config. + # 3. Set `scope` attribute for each base variable. + # Different from `_scope_`, `scope` is not a key of base + # dict, `scope` attribute will be parsed to key `_scope_` + # by function `_parse_scope` only if the base variable is + # accessed by the current config. + _cfg_dict = Config._dict_to_config_dict(_cfg_dict, scope) + base_cfg_dict.update(_cfg_dict) + + if filename.endswith('.py'): + with open(temp_config_file.name, encoding='utf-8') as f: + parsed_codes = ast.parse(f.read()) + parsed_codes = RemoveAssignFromAST(BASE_KEY).visit( + parsed_codes) + codeobj = compile(parsed_codes, filename, mode='exec') + # Support load global variable in nested function of the + # config. + global_locals_var = {BASE_KEY: base_cfg_dict} + ori_keys = set(global_locals_var.keys()) + eval(codeobj, global_locals_var, global_locals_var) + cfg_dict = { + key: value + for key, value in global_locals_var.items() + if (key not in ori_keys and not key.startswith('__')) + } + elif filename.endswith(('.yml', '.yaml', '.json')): + cfg = OmegaConf.load(temp_config_file.name) + cfg_dict = OmegaConf.to_container(cfg, resolve=True) + # close temp file + for key, value in list(cfg_dict.items()): + if isinstance(value, + (types.FunctionType, types.ModuleType)): + cfg_dict.pop(key) + temp_config_file.close() + + # If the current config accesses a base variable of base + # configs, The ``scope`` attribute of corresponding variable + # will be converted to the `_scope_`. 
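+                # For example (illustrative), a variable taken from an
+                # external 'mmdet::...' base config would carry
+                # ``scope='mmdet'`` and is turned into ``_scope_='mmdet'``
+                # by the call below.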
+ Config._parse_scope(cfg_dict) + except Exception as e: + if osp.exists(temp_config_dir): + shutil.rmtree(temp_config_dir) + raise e + + # check deprecation information + if DEPRECATION_KEY in cfg_dict: + deprecation_info = cfg_dict.pop(DEPRECATION_KEY) + warning_msg = f'The config file {filename} will be deprecated ' \ + 'in the future.' + if 'expected' in deprecation_info: + warning_msg += f' Please use {deprecation_info["expected"]} ' \ + 'instead.' + if 'reference' in deprecation_info: + warning_msg += ' More information can be found at ' \ + f'{deprecation_info["reference"]}' + warnings.warn(warning_msg, DeprecationWarning) + + cfg_text = filename + '\n' + with open(filename, encoding='utf-8') as f: + # Setting encoding explicitly to resolve coding issue on windows + cfg_text += f.read() + + # Substitute base variables from strings to their actual values + cfg_dict = Config._substitute_base_vars(cfg_dict, base_var_dict, + base_cfg_dict) + cfg_dict.pop(BASE_KEY, None) + + cfg_dict = Config._merge_a_into_b(cfg_dict, base_cfg_dict) + cfg_dict = { + k: v + for k, v in cfg_dict.items() if not k.startswith('__') + } + + # merge cfg_text + cfg_text_list.append(cfg_text) + cfg_text = '\n'.join(cfg_text_list) + + return cfg_dict, cfg_text, env_variables + + @staticmethod + def _parse_lazy_import(filename: str) -> Tuple[ConfigDict, set]: + """Transform file to variables dictionary. + + Args: + filename (str): Name of config file. + + Returns: + Tuple[dict, dict]: ``cfg_dict`` and ``imported_names``. + + - cfg_dict (dict): Variables dictionary of parsed config. + - imported_names (set): Used to mark the names of + imported object. + """ + # In lazy import mode, users can use the Python syntax `import` to + # implement inheritance between configuration files, which is easier + # for users to understand the hierarchical relationships between + # different configuration files. + + # Besides, users can also using `import` syntax to import corresponding + # module which will be filled in the `type` field. It means users + # can directly navigate to the source of the module in the + # configuration file by clicking the `type` field. + + # To avoid really importing the third party package like `torch` + # during import `type` object, we use `_parse_lazy_import` to parse the + # configuration file, which will not actually trigger the import + # process, but simply parse the imported `type`s as LazyObject objects. + + # The overall pipeline of _parse_lazy_import is: + # 1. Parse the base module from the config file. + # || + # \/ + # base_module = ['mmdet.configs.default_runtime'] + # || + # \/ + # 2. recursively parse the base module and gather imported objects to + # a dict. + # || + # \/ + # The base_dict will be: + # { + # 'mmdet.configs.default_runtime': {...} + # 'mmdet.configs.retinanet_r50_fpn_1x_coco': {...} + # ... + # }, each item in base_dict is a dict of `LazyObject` + # 3. parse the current config file filling the imported variable + # with the base_dict. + # + # 4. During the parsing process, all imported variable will be + # recorded in the `imported_names` set. These variables can be + # accessed, but will not be dumped by default. 
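+        #
+        # A small illustrative sketch (not executed here): a config written as
+        #
+        #     with read_base():
+        #         from .._base_.default_runtime import *
+        #     from torch.optim import SGD
+        #     optimizer = dict(type=SGD, lr=0.01)
+        #
+        # is parsed so that ``optimizer.type`` holds
+        # ``LazyObject('torch.optim', 'SGD')`` rather than the real class,
+        # i.e. no third-party import is triggered at parse time.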
+ + with open(filename, encoding='utf-8') as f: + global_dict = {'LazyObject': LazyObject, '__file__': filename} + base_dict = {} + + parsed_codes = ast.parse(f.read()) + # get the names of base modules, and remove the + # `with read_base():'` statement + base_modules = Config._get_base_modules(parsed_codes.body) + base_imported_names = set() + for base_module in base_modules: + # If base_module means a relative import, assuming the level is + # 2, which means the module is imported like + # "from ..a.b import c". we must ensure that c is an + # object `defined` in module b, and module b should not be a + # package including `__init__` file but a single python file. + level = len(re.match(r'\.*', base_module).group()) + if level > 0: + # Relative import + base_dir = osp.dirname(filename) + module_path = osp.join( + base_dir, *(['..'] * (level - 1)), + f'{base_module[level:].replace(".", "/")}.py') + else: + # Absolute import + module_list = base_module.split('.') + if len(module_list) == 1: + raise SyntaxError( + 'The imported configuration file should not be ' + f'an independent package {module_list[0]}. Here ' + 'is an example: ' + '`with read_base(): from mmdet.configs.retinanet_r50_fpn_1x_coco import *`' # noqa: E501 + ) + else: + package = module_list[0] + root_path = get_installed_path(package) + module_path = f'{osp.join(root_path, *module_list[1:])}.py' # noqa: E501 + if not osp.isfile(module_path): + raise SyntaxError( + f'{module_path} not found! It means that incorrect ' + 'module is defined in ' + f'`with read_base(): = from {base_module} import ...`, please ' # noqa: E501 + 'make sure the base config module is valid ' + 'and is consistent with the prior import ' + 'logic') + _base_cfg_dict, _base_imported_names = Config._parse_lazy_import( # noqa: E501 + module_path) + base_imported_names |= _base_imported_names + # The base_dict will be: + # { + # 'mmdet.configs.default_runtime': {...} + # 'mmdet.configs.retinanet_r50_fpn_1x_coco': {...} + # ... + # } + base_dict[base_module] = _base_cfg_dict + + # `base_dict` contains all the imported modules from `base_cfg`. + # In order to collect the specific imported module from `base_cfg` + # before parse the current file, we using AST Transform to + # transverse the imported module from base_cfg and merge then into + # the global dict. After the ast transformation, most of import + # syntax will be removed (except for the builtin import) and + # replaced with the `LazyObject` + transform = ImportTransformer( + global_dict=global_dict, + base_dict=base_dict, + filename=filename) + modified_code = transform.visit(parsed_codes) + modified_code, abs_imported = _gather_abs_import_lazyobj( + modified_code, filename=filename) + imported_names = transform.imported_obj | abs_imported + imported_names |= base_imported_names + modified_code = ast.fix_missing_locations(modified_code) + exec( + compile(modified_code, filename, mode='exec'), global_dict, + global_dict) + + ret: dict = {} + for key, value in global_dict.items(): + if key.startswith('__') or key in ['LazyObject']: + continue + ret[key] = value + # convert dict to ConfigDict + cfg_dict = Config._dict_to_config_dict_lazy(ret) + + return cfg_dict, imported_names + + @staticmethod + def _dict_to_config_dict_lazy(cfg: dict): + """Recursively converts ``dict`` to :obj:`ConfigDict`. The only + difference between ``_dict_to_config_dict_lazy`` and + ``_dict_to_config_dict_lazy`` is that the former one does not consider + the scope, and will not trigger the building of ``LazyObject``. 
+ + Args: + cfg (dict): Config dict. + + Returns: + ConfigDict: Converted dict. + """ + # Only the outer dict with key `type` should have the key `_scope_`. + if isinstance(cfg, dict): + cfg_dict = ConfigDict() + for key, value in cfg.items(): + cfg_dict[key] = Config._dict_to_config_dict_lazy(value) + return cfg_dict + if isinstance(cfg, (tuple, list)): + return type(cfg)( + Config._dict_to_config_dict_lazy(_cfg) for _cfg in cfg) + return cfg + + @staticmethod + def _dict_to_config_dict(cfg: dict, + scope: Optional[str] = None, + has_scope=True): + """Recursively converts ``dict`` to :obj:`ConfigDict`. + + Args: + cfg (dict): Config dict. + scope (str, optional): Scope of instance. + has_scope (bool): Whether to add `_scope_` key to config dict. + + Returns: + ConfigDict: Converted dict. + """ + # Only the outer dict with key `type` should have the key `_scope_`. + if isinstance(cfg, dict): + if has_scope and 'type' in cfg: + has_scope = False + if scope is not None and cfg.get('_scope_', None) is None: + cfg._scope_ = scope # type: ignore + cfg = ConfigDict(cfg) + dict.__setattr__(cfg, 'scope', scope) + for key, value in cfg.items(): + cfg[key] = Config._dict_to_config_dict( + value, scope=scope, has_scope=has_scope) + elif isinstance(cfg, tuple): + cfg = tuple( + Config._dict_to_config_dict(_cfg, scope, has_scope=has_scope) + for _cfg in cfg) + elif isinstance(cfg, list): + cfg = [ + Config._dict_to_config_dict(_cfg, scope, has_scope=has_scope) + for _cfg in cfg + ] + return cfg + + @staticmethod + def _parse_scope(cfg: dict) -> None: + """Adds ``_scope_`` to :obj:`ConfigDict` instance, which means a base + variable. + + If the config dict already has the scope, scope will not be + overwritten. + + Args: + cfg (dict): Config needs to be parsed with scope. + """ + if isinstance(cfg, ConfigDict): + cfg._scope_ = cfg.scope + elif isinstance(cfg, (tuple, list)): + [Config._parse_scope(value) for value in cfg] + else: + return + + @staticmethod + def _get_base_files(filename: str) -> list: + """Get the base config file. + + Args: + filename (str): The config file. + + Raises: + TypeError: Name of config file. + + Returns: + list: A list of base config. + """ + file_format = osp.splitext(filename)[1] + if file_format == '.py': + Config._validate_py_syntax(filename) + with open(filename, encoding='utf-8') as f: + parsed_codes = ast.parse(f.read()).body + + def is_base_line(c): + return (isinstance(c, ast.Assign) + and isinstance(c.targets[0], ast.Name) + and c.targets[0].id == BASE_KEY) + + base_code = next((c for c in parsed_codes if is_base_line(c)), + None) + if base_code is not None: + base_code = ast.Expression( # type: ignore + body=base_code.value) # type: ignore + base_files = eval(compile(base_code, '', + mode='eval')) # type: ignore + else: + base_files = [] + elif file_format in ('.yml', '.yaml', '.json'): + cfg = OmegaConf.load(filename) + cfg_dict = OmegaConf.to_container(cfg, resolve=True) + base_files = cfg_dict.get(BASE_KEY, []) + else: + raise SyntaxError( + 'The config type should be py, json, yaml or ' + f'yml, but got {file_format}') + base_files = base_files if isinstance(base_files, + list) else [base_files] + return base_files + + @staticmethod + def _get_cfg_path(cfg_path: str, + filename: str) -> Tuple[str, Optional[str]]: + """Get the config path from the current or external package. + + Args: + cfg_path (str): Relative path of config. + filename (str): The config file being parsed. + + Returns: + Tuple[str, str or None]: Path and scope of config. 
If the config + is not an external config, the scope will be `None`. + """ + if '::' in cfg_path: + # `cfg_path` startswith '::' means an external config path. + # Get package name and relative config path. + scope = cfg_path.partition('::')[0] + package, cfg_path = _get_package_and_cfg_path(cfg_path) + + if not is_installed(package): + raise ModuleNotFoundError( + f'{package} is not installed, please install {package} ' + f'manually') + + # Get installed package path. + package_path = get_installed_path(package) + try: + # Get config path from meta file. + cfg_path = _get_external_cfg_path(package_path, cfg_path) + except ValueError: + # Since base config does not have a metafile, it should be + # concatenated with package path and relative config path. + cfg_path = _get_external_cfg_base_path(package_path, cfg_path) + except FileNotFoundError as e: + raise e + return cfg_path, scope + else: + # Get local config path. + cfg_dir = osp.dirname(filename) + cfg_path = osp.join(cfg_dir, cfg_path) + return cfg_path, None + + @staticmethod + def _merge_a_into_b(a: dict, + b: dict, + allow_list_keys: bool = False) -> dict: + """Merge dict ``a`` into dict ``b`` (non-inplace). + + Values in ``a`` will overwrite ``b``. ``b`` is copied first to avoid + in-place modifications. + + Args: + a (dict): The source dict to be merged into ``b``. + b (dict): The origin dict to be fetch keys from ``a``. + allow_list_keys (bool): If True, int string keys (e.g. '0', '1') + are allowed in source ``a`` and will replace the element of the + corresponding index in b if b is a list. Defaults to False. + + Returns: + dict: The modified dict of ``b`` using ``a``. + + Examples: + # Normally merge a into b. + >>> Config._merge_a_into_b( + ... dict(obj=dict(a=2)), dict(obj=dict(a=1))) + {'obj': {'a': 2}} + + # Delete b first and merge a into b. + >>> Config._merge_a_into_b( + ... dict(obj=dict(_delete_=True, a=2)), dict(obj=dict(a=1))) + {'obj': {'a': 2}} + + # b is a list + >>> Config._merge_a_into_b( + ... {'0': dict(a=2)}, [dict(a=1), dict(b=2)], True) + [{'a': 2}, {'b': 2}] + """ + b = b.copy() + for k, v in a.items(): + if allow_list_keys and k.isdigit() and isinstance(b, list): + k = int(k) + if len(b) <= k: + raise KeyError(f'Index {k} exceeds the length of list {b}') + b[k] = Config._merge_a_into_b(v, b[k], allow_list_keys) + elif isinstance(v, dict): + if k in b and not v.pop(DELETE_KEY, False): + allowed_types: Union[Tuple, type] = ( + dict, list) if allow_list_keys else dict + if not isinstance(b[k], allowed_types): + raise TypeError( + f'{k}={v} in child config cannot inherit from ' + f'base because {k} is a dict in the child config ' + f'but is of type {type(b[k])} in base config. 
' + f'You may set `{DELETE_KEY}=True` to ignore the ' + f'base config.') + b[k] = Config._merge_a_into_b(v, b[k], allow_list_keys) + else: + b[k] = ConfigDict(v) + else: + b[k] = v + return b + + @property + def filename(self) -> str: + """Get file name of config.""" + return self._filename + + @property + def text(self) -> str: + """Get config text.""" + return self._text + + @property + def env_variables(self) -> dict: + """Get used environment variables.""" + return self._env_variables + + @property + def pretty_text(self) -> str: + """Get formatted python config text.""" + + indent = 4 + + def _indent(s_, num_spaces): + s = s_.split('\n') + if len(s) == 1: + return s_ + first = s.pop(0) + s = [(num_spaces * ' ') + line for line in s] + s = '\n'.join(s) + s = first + '\n' + s + return s + + def _format_basic_types(k, v, use_mapping=False): + if isinstance(v, str): + v_str = repr(v) + else: + v_str = str(v) + + if use_mapping: + k_str = f"'{k}'" if isinstance(k, str) else str(k) + attr_str = f'{k_str}: {v_str}' + else: + attr_str = f'{str(k)}={v_str}' + attr_str = _indent(attr_str, indent) + + return attr_str + + def _format_list_tuple(k, v, use_mapping=False): + if isinstance(v, list): + left = '[' + right = ']' + else: + left = '(' + right = ')' + + v_str = f'{left}\n' + # check if all items in the list are dict + for item in v: + if isinstance(item, dict): + v_str += f'dict({_indent(_format_dict(item), indent)}),\n' + elif isinstance(item, tuple): + v_str += f'{_indent(_format_list_tuple(None, item), indent)},\n' # noqa: 501 + elif isinstance(item, list): + v_str += f'{_indent(_format_list_tuple(None, item), indent)},\n' # noqa: 501 + elif isinstance(item, str): + v_str += f'{_indent(repr(item), indent)},\n' + else: + v_str += str(item) + ',\n' + if k is None: + return _indent(v_str, indent) + right + if use_mapping: + k_str = f"'{k}'" if isinstance(k, str) else str(k) + attr_str = f'{k_str}: {v_str}' + else: + attr_str = f'{str(k)}={v_str}' + attr_str = _indent(attr_str, indent) + right + return attr_str + + def _contain_invalid_identifier(dict_str): + contain_invalid_identifier = False + for key_name in dict_str: + contain_invalid_identifier |= \ + (not str(key_name).isidentifier()) + return contain_invalid_identifier + + def _format_dict(input_dict, outest_level=False): + r = '' + s = [] + + use_mapping = _contain_invalid_identifier(input_dict) + if use_mapping: + r += '{' + for idx, (k, v) in enumerate( + sorted(input_dict.items(), key=lambda x: str(x[0]))): + is_last = idx >= len(input_dict) - 1 + end = '' if outest_level or is_last else ',' + if isinstance(v, dict): + v_str = '\n' + _format_dict(v) + if use_mapping: + k_str = f"'{k}'" if isinstance(k, str) else str(k) + attr_str = f'{k_str}: dict({v_str}' + else: + attr_str = f'{str(k)}=dict({v_str}' + attr_str = _indent(attr_str, indent) + ')' + end + elif isinstance(v, (list, tuple)): + attr_str = _format_list_tuple(k, v, use_mapping) + end + else: + attr_str = _format_basic_types(k, v, use_mapping) + end + + s.append(attr_str) + r += '\n'.join(s) + if use_mapping: + r += '}' + return r + + cfg_dict = self.to_dict() + text = _format_dict(cfg_dict, outest_level=True) + if self._format_python_code: + # copied from setup.cfg + yapf_style = dict( + based_on_style='pep8', + blank_line_before_nested_class_or_def=True, + split_before_expression_after_opening_paren=True) + try: + from ...utils import digit_version + if digit_version(yapf.__version__) >= digit_version('0.40.2'): + text, _ = FormatCode(text, style_config=yapf_style) + 
else: + text, _ = FormatCode( + text, style_config=yapf_style, verify=True) + except: # noqa: E722 + raise SyntaxError('Failed to format the config file, please ' + f'check the syntax of: \n{text}') + return text + + def __repr__(self): + return f'Config (path: {self.filename}): {self._cfg_dict.__repr__()}' + + def __len__(self): + return len(self._cfg_dict) + + def __getattr__(self, name: str) -> Any: + return getattr(self._cfg_dict, name) + + def __getitem__(self, name): + return self._cfg_dict.__getitem__(name) + + def __setattr__(self, name, value): + if isinstance(value, dict): + value = ConfigDict(value) + self._cfg_dict.__setattr__(name, value) + + def __setitem__(self, name, value): + if isinstance(value, dict): + value = ConfigDict(value) + self._cfg_dict.__setitem__(name, value) + + def __iter__(self): + return iter(self._cfg_dict) + + def __getstate__( + self + ) -> Tuple[dict, Optional[str], Optional[str], dict, bool, set]: + state = (self._cfg_dict, self._filename, self._text, + self._env_variables, self._format_python_code, + self._imported_names) + return state + + def __deepcopy__(self, memo): + cls = self.__class__ + other = cls.__new__(cls) + memo[id(self)] = other + + for key, value in self.__dict__.items(): + super(Config, other).__setattr__(key, copy.deepcopy(value, memo)) + + return other + + def __copy__(self): + cls = self.__class__ + other = cls.__new__(cls) + other.__dict__.update(self.__dict__) + super(Config, other).__setattr__('_cfg_dict', self._cfg_dict.copy()) + + return other + + copy = __copy__ + + def __setstate__(self, state: Tuple[dict, Optional[str], Optional[str], + dict, bool, set]): + super().__setattr__('_cfg_dict', state[0]) + super().__setattr__('_filename', state[1]) + super().__setattr__('_text', state[2]) + super().__setattr__('_env_variables', state[3]) + super().__setattr__('_format_python_code', state[4]) + super().__setattr__('_imported_names', state[5]) + + def dump(self, file: Optional[Union[str, Path]] = None): + """Dump config to file or return config text. + + Args: + file (str or Path, optional): If not specified, then the object + is dumped to a str, otherwise to a file specified by the filename. + Defaults to None. + + Returns: + str or None: Config text. 
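+
+        Examples:
+            >>> # Illustrative sketch; 'example_dump.py' is a hypothetical
+            >>> # output path and the config content is made up.
+            >>> cfg = Config(dict(model=dict(type='SegFormer')))
+            >>> text = cfg.dump()            # return the config as text
+            >>> cfg.dump('example_dump.py')  # doctest: +SKIP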
+ """ + file = str(file) if isinstance(file, Path) else file + cfg_dict = self.to_dict() + if file is None: + if self.filename is None or self.filename.endswith('.py'): + return self.pretty_text + else: + file_format = self.filename.split('.')[-1] + return dump(cfg_dict, file_format=file_format) + elif file.endswith('.py'): + with open(file, 'w', encoding='utf-8') as f: + f.write(self.pretty_text) + else: + file_format = file.split('.')[-1] + return dump(cfg_dict, file=file, file_format=file_format) + + @staticmethod + def _is_lazy_import(filename: str) -> bool: + if not filename.endswith('.py'): + return False + with open(filename, encoding='utf-8') as f: + codes_str = f.read() + parsed_codes = ast.parse(codes_str) + for node in ast.walk(parsed_codes): + if (isinstance(node, ast.Assign) + and isinstance(node.targets[0], ast.Name) + and node.targets[0].id == BASE_KEY): + return False + + if isinstance(node, ast.With): + expr = node.items[0].context_expr + if (not isinstance(expr, ast.Call) + or not expr.func.id == 'read_base'): # type: ignore + raise SyntaxError( + 'Only `read_base` context manager can be used in the ' + 'config') + return True + if isinstance(node, ast.ImportFrom): + # relative import -> lazy_import + if node.level != 0: + return True + # Skip checking when using `mmengine.config` in cfg file + if (node.module == 'mmengine' and len(node.names) == 1 + and node.names[0].name == 'Config'): + continue + if not isinstance(node.module, str): + continue + # non-builtin module -> lazy_import + if not _is_builtin_module(node.module): + return True + if isinstance(node, ast.Import): + for alias_node in node.names: + if not _is_builtin_module(alias_node.name): + return True + return False + + def _to_lazy_dict(self, keep_imported: bool = False) -> dict: + """Convert config object to dictionary with lazy object, and filter the + imported object.""" + res = self._cfg_dict._to_lazy_dict() + if hasattr(self, '_imported_names') and not keep_imported: + res = { + key: value + for key, value in res.items() + if key not in self._imported_names + } + return res + + def to_dict(self, keep_imported: bool = False): + """Convert all data in the config to a builtin ``dict``. + + Args: + keep_imported (bool): Whether to keep the imported field. + Defaults to False + + If you import third-party objects in the config file, all imported + objects will be converted to a string like ``torch.optim.SGD`` + """ + cfg_dict = self._cfg_dict.to_dict() + if hasattr(self, '_imported_names') and not keep_imported: + cfg_dict = { + key: value + for key, value in cfg_dict.items() + if key not in self._imported_names + } + return cfg_dict \ No newline at end of file diff --git a/segformer_plusplus/configs/config/lazy.py b/segformer_plusplus/configs/config/lazy.py new file mode 100644 index 0000000000000000000000000000000000000000..0a3e85baea611fce22b5c3cb01aeecbad1eebe90 --- /dev/null +++ b/segformer_plusplus/configs/config/lazy.py @@ -0,0 +1,267 @@ +import importlib +from typing import Any, Optional, Union, Type +from collections import abc + + +class LazyObject: + """LazyObject is used to lazily initialize the imported module during + parsing the configuration file. 
+ + During parsing process, the syntax like: + + Examples: + >>> import torch.nn as nn + >>> from mmdet.models import RetinaNet + >>> import mmcls.models + >>> import mmcls.datasets + >>> import mmcls + + Will be parsed as: + + Examples: + >>> # import torch.nn as nn + >>> nn = lazyObject('torch.nn') + >>> # from mmdet.models import RetinaNet + >>> RetinaNet = lazyObject('mmdet.models', 'RetinaNet') + >>> # import mmcls.models; import mmcls.datasets; import mmcls + >>> mmcls = lazyObject(['mmcls', 'mmcls.datasets', 'mmcls.models']) + + ``LazyObject`` records all module information and will be further + referenced by the configuration file. + + Args: + module (str or list or tuple): The module name to be imported. + imported (str, optional): The imported module name. Defaults to None. + location (str, optional): The filename and line number of the imported + module statement happened. + """ + + def __init__(self, + module: Union[str, list, tuple], + imported: Optional[str] = None, + location: Optional[str] = None): + if not isinstance(module, str) and not is_seq_of(module, str): + raise TypeError('module should be `str`, `list`, or `tuple`' + f'but got {type(module)}, this might be ' + 'a bug, please report it') + self._module: Union[str, list, tuple] = module + + if not isinstance(imported, str) and imported is not None: + raise TypeError('imported should be `str` or None, but got ' + f'{type(imported)}, this might be ' + 'a bug , please report it') + self._imported = imported + self.location = location + + def build(self) -> Any: + """Return imported object. + + Returns: + Any: Imported object + """ + if isinstance(self._module, str): + try: + module = importlib.import_module(self._module) + except Exception as e: + raise type(e)(f'Failed to import {self._module} ' + f'in {self.location} for {e}') + + if self._imported is not None: + if hasattr(module, self._imported): + module = getattr(module, self._imported) + else: + raise ImportError( + f'Failed to import {self._imported} ' + f'from {self._module} in {self.location}') + + return module + else: + try: + for module in self._module: + importlib.import_module(module) # type: ignore + module_name = self._module[0].split('.')[0] + return importlib.import_module(module_name) + except Exception as e: + raise type(e)(f'Failed to import {self.module} ' + f'in {self.location} for {e}') + + @property + def module(self): + if isinstance(self._module, str): + return self._module + return self._module[0].split('.')[0] + + def __call__(self, *args, **kwargs): + raise RuntimeError() + + def __deepcopy__(self, memo): + return LazyObject(self._module, self._imported, self.location) + + def __getattr__(self, name): + # Cannot locate the line number of the getting attribute. + # Therefore only record the filename. + if self.location is not None: + location = self.location.split(', line')[0] + else: + location = self.location + return LazyAttr(name, self, location) + + def __str__(self) -> str: + if self._imported is not None: + return self._imported + return self.module + + __repr__ = __str__ + + # `pickle.dump` will try to get the `__getstate__` and `__setstate__` + # methods of the dumped object. If these two methods are not defined, + # LazyObject will return a `__getstate__` LazyObject` or `__setstate__` + # LazyObject. + def __getstate__(self): + return self.__dict__ + + def __setstate__(self, state): + self.__dict__ = state + + +class LazyAttr: + """The attribute of the LazyObject. 
+ + When parsing the configuration file, the imported syntax will be + parsed as the assignment ``LazyObject``. During the subsequent parsing + process, users may reference the attributes of the LazyObject. + To ensure that these attributes also contain information needed to + reconstruct the attribute itself, LazyAttr was introduced. + + Examples: + >>> models = LazyObject(['mmdet.models']) + >>> model = dict(type=models.RetinaNet) + >>> print(type(model['type'])) # + >>> print(model['type'].build()) # + """ # noqa: E501 + + def __init__(self, + name: str, + source: Union['LazyObject', 'LazyAttr'], + location=None): + self.name = name + self.source: Union[LazyAttr, LazyObject] = source + + if isinstance(self.source, LazyObject): + if isinstance(self.source._module, str): + if self.source._imported is None: + # source code: + # from xxx.yyy import zzz + # equivalent code: + # zzz = LazyObject('xxx.yyy', 'zzz') + # The source code of get attribute: + # eee = zzz.eee + # Then, `eee._module` should be "xxx.yyy.zzz" + self._module = self.source._module + else: + # source code: + # import xxx.yyy as zzz + # equivalent code: + # zzz = LazyObject('xxx.yyy') + # The source code of get attribute: + # eee = zzz.eee + # Then, `eee._module` should be "xxx.yyy" + self._module = f'{self.source._module}.{self.source}' + else: + # The source code of LazyObject should be + # 1. import xxx.yyy + # 2. import xxx.zzz + # Equivalent to + # xxx = LazyObject(['xxx.yyy', 'xxx.zzz']) + + # The source code of LazyAttr should be + # eee = xxx.eee + # Then, eee._module = xxx + self._module = str(self.source) + elif isinstance(self.source, LazyAttr): + # 1. import xxx + # 2. zzz = xxx.yyy.zzz + + # Equivalent to: + # xxx = LazyObject('xxx') + # zzz = xxx.yyy.zzz + # zzz._module = xxx.yyy._module + zzz.name + self._module = f'{self.source._module}.{self.source.name}' + self.location = location + + @property + def module(self): + return self._module + + def __call__(self, *args, **kwargs: Any) -> Any: + raise RuntimeError() + + def __getattr__(self, name: str) -> 'LazyAttr': + return LazyAttr(name, self) + + def __deepcopy__(self, memo): + return LazyAttr(self.name, self.source) + + def build(self) -> Any: + """Return the attribute of the imported object. + + Returns: + Any: attribute of the imported object. + """ + obj = self.source.build() + try: + return getattr(obj, self.name) + except AttributeError: + raise ImportError(f'Failed to import {self.module}.{self.name} in ' + f'{self.location}') + except ImportError as e: + raise e + + def __str__(self) -> str: + return self.name + + __repr__ = __str__ + + # `pickle.dump` will try to get the `__getstate__` and `__setstate__` + # methods of the dumped object. If these two methods are not defined, + # LazyAttr will return a `__getstate__` LazyAttr` or `__setstate__` + # LazyAttr. + def __getstate__(self): + return self.__dict__ + + def __setstate__(self, state): + self.__dict__ = state + + +def is_seq_of(seq: Any, + expected_type: Union[Type, tuple], + seq_type: Optional[Type] = None) -> bool: + """Check whether it is a sequence of some type. + + Args: + seq (Sequence): The sequence to be checked. + expected_type (type or tuple): Expected type of sequence items. + seq_type (type, optional): Expected sequence type. Defaults to None. + + Returns: + bool: Return True if ``seq`` is valid else False. 
+ + Examples: + >>> from mmengine.utils import is_seq_of + >>> seq = ['a', 'b', 'c'] + >>> is_seq_of(seq, str) + True + >>> is_seq_of(seq, int) + False + """ + if seq_type is None: + exp_seq_type = abc.Sequence + else: + assert isinstance(seq_type, type) + exp_seq_type = seq_type + if not isinstance(seq, exp_seq_type): + return False + for item in seq: + if not isinstance(item, expected_type): + return False + return True \ No newline at end of file diff --git a/segformer_plusplus/configs/config/utils.py b/segformer_plusplus/configs/config/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..b65bbdb1c058bb7e7f1c6c62e4d9eb869b11521d --- /dev/null +++ b/segformer_plusplus/configs/config/utils.py @@ -0,0 +1,647 @@ +import ast +import os.path as osp +import re +import sys +import warnings +from collections import defaultdict +from importlib.util import find_spec +from typing import List, Optional, Tuple, Union +from importlib import import_module as real_import_module +import json +import pickle +from pathlib import Path +from mim.utils import package2module + +import yaml +from omegaconf import OmegaConf + + +PYTHON_ROOT_DIR = osp.dirname(osp.dirname(sys.executable)) +SYSTEM_PYTHON_PREFIX = '/usr/lib/python' + +MODULE2PACKAGE = { + 'mmcls': 'mmcls', + 'mmdet': 'mmdet', + 'mmdet3d': 'mmdet3d', + 'mmseg': 'mmsegmentation', + 'mmaction': 'mmaction2', + 'mmtrack': 'mmtrack', + 'mmpose': 'mmpose', + 'mmedit': 'mmedit', + 'mmocr': 'mmocr', + 'mmgen': 'mmgen', + 'mmfewshot': 'mmfewshot', + 'mmrazor': 'mmrazor', + 'mmflow': 'mmflow', + 'mmhuman3d': 'mmhuman3d', + 'mmrotate': 'mmrotate', + 'mmselfsup': 'mmselfsup', + 'mmyolo': 'mmyolo', + 'mmpretrain': 'mmpretrain', + 'mmagic': 'mmagic', +} + +# PKG2PROJECT is not a proper name to represent the mapping between module name +# (module import from) and package name (used by pip install). Therefore, +# PKG2PROJECT will be deprecated and this alias will only be kept until +# MMEngine v1.0.0 +PKG2PROJECT = MODULE2PACKAGE + + +class ConfigParsingError(RuntimeError): + """Raise error when failed to parse pure Python style config files.""" + + +def _get_cfg_metainfo(package_path: str, cfg_path: str) -> dict: + """Get target meta information from all 'metafile.yml' defined in `mode- + index.yml` of external package. + + Args: + package_path (str): Path of external package. + cfg_path (str): Name of experiment config. + + Returns: + dict: Meta information of target experiment. + """ + meta_index_path = osp.join(package_path, '.mim', 'model-index.yml') + meta_index = OmegaConf.to_container(OmegaConf.load(meta_index_path), resolve=True) + cfg_dict = dict() + for meta_path in meta_index['Import']: + meta_path = osp.join(package_path, '.mim', meta_path) + cfg_meta = OmegaConf.to_container(OmegaConf.load(meta_path), resolve=True) + for model_cfg in cfg_meta['Models']: + if 'Config' not in model_cfg: + warnings.warn(f'There is not `Config` define in {model_cfg}') + continue + cfg_name = model_cfg['Config'].partition('/')[-1] + # Some config could have multiple weights, we only pick the + # first one. + if cfg_name in cfg_dict: + continue + cfg_dict[cfg_name] = model_cfg + if cfg_path not in cfg_dict: + raise ValueError(f'Expected configs: {cfg_dict.keys()}, but got ' + f'{cfg_path}') + return cfg_dict[cfg_path] + + +def _get_external_cfg_path(package_path: str, cfg_file: str) -> str: + """Get config path of external package. + + Args: + package_path (str): Path of external package. + cfg_file (str): Name of experiment config. 
+ + Returns: + str: Absolute config path from external package. + """ + cfg_file = cfg_file.split('.')[0] + model_cfg = _get_cfg_metainfo(package_path, cfg_file) + cfg_path = osp.join(package_path, model_cfg['Config']) + check_file_exist(cfg_path) + return cfg_path + + +def _get_external_cfg_base_path(package_path: str, cfg_name: str) -> str: + """Get base config path of external package. + + Args: + package_path (str): Path of external package. + cfg_name (str): External relative config path with 'package::'. + + Returns: + str: Absolute config path from external package. + """ + cfg_path = osp.join(package_path, '.mim', 'configs', cfg_name) + check_file_exist(cfg_path) + return cfg_path + + +def _get_package_and_cfg_path(cfg_path: str) -> Tuple[str, str]: + """Get package name and relative config path. + + Args: + cfg_path (str): External relative config path with 'package::'. + + Returns: + Tuple[str, str]: Package name and config path. + """ + if re.match(r'\w*::\w*/\w*', cfg_path) is None: + raise ValueError( + '`_get_package_and_cfg_path` is used for get external package, ' + 'please specify the package name and relative config path, just ' + 'like `mmdet::faster_rcnn/faster-rcnn_r50_fpn_1x_coco.py`') + package_cfg = cfg_path.split('::') + if len(package_cfg) > 2: + raise ValueError('`::` should only be used to separate package and ' + 'config name, but found multiple `::` in ' + f'{cfg_path}') + package, cfg_path = package_cfg + assert package in MODULE2PACKAGE, ( + f'mmengine does not support to load {package} config.') + package = MODULE2PACKAGE[package] + return package, cfg_path + + +class RemoveAssignFromAST(ast.NodeTransformer): + """Remove Assign node if the target's name match the key. + + Args: + key (str): The target name of the Assign node. + """ + + def __init__(self, key): + self.key = key + + def visit_Assign(self, node): + if (isinstance(node.targets[0], ast.Name) + and node.targets[0].id == self.key): + return None + else: + return node + + +def _is_builtin_module(module_name: str) -> bool: + """Check if a module is a built-in module. + + Arg: + module_name: name of module. + """ + if module_name.startswith('.'): + return False + if module_name.startswith('mmengine.config'): + return True + if module_name in sys.builtin_module_names: + return True + spec = find_spec(module_name.split('.')[0]) + # Module not found + if spec is None: + return False + origin_path = getattr(spec, 'origin', None) + if origin_path is None: + return False + origin_path = osp.abspath(origin_path) + if ('site-package' in origin_path or 'dist-package' in origin_path + or not origin_path.startswith( + (PYTHON_ROOT_DIR, SYSTEM_PYTHON_PREFIX))): + return False + else: + return True + + +class ImportTransformer(ast.NodeTransformer): + """Convert the import syntax to the assignment of + :class:`mmengine.config.LazyObject` and preload the base variable before + parsing the configuration file. + + Since you are already looking at this part of the code, I believe you must + be interested in the mechanism of the ``lazy_import`` feature of + :class:`Config`. In this docstring, we will dive deeper into its + principles. + + Most of OpenMMLab users maybe bothered with that: + + * In most of popular IDEs, they cannot navigate to the source code in + configuration file + * In most of popular IDEs, they cannot jump to the base file in current + configuration file, which is much painful when the inheritance + relationship is complex. + + In order to solve this problem, we introduce the ``lazy_import`` mode. 
+ + A very intuitive idea for solving this problem is to import the module + corresponding to the "type" field using the ``import`` syntax. Similarly, + we can also ``import`` base file. + + However, this approach has a significant drawback. It requires triggering + the import logic to parse the configuration file, which can be + time-consuming. Additionally, it implies downloading numerous dependencies + solely for the purpose of parsing the configuration file. + However, it's possible that only a portion of the config will actually be + used. For instance, the package used in the ``train_pipeline`` may not + be necessary for an evaluation task. Forcing users to download these + unused packages is not a desirable solution. + + To avoid this problem, we introduce :class:`mmengine.config.LazyObject` and + :class:`mmengine.config.LazyAttr`. Before we proceed with further + explanations, you may refer to the documentation of these two modules to + gain an understanding of their functionalities. + + Actually, one of the functions of ``ImportTransformer`` is to hack the + ``import`` syntax. It will replace the import syntax + (exclude import the base files) with the assignment of ``LazyObject``. + + As for the import syntax of the base file, we cannot lazy import it since + we're eager to merge the fields of current file and base files. Therefore, + another function of the ``ImportTransformer`` is to collaborate with + ``Config._parse_lazy_import`` to parse the base files. + + Args: + global_dict (dict): The global dict of the current configuration file. + If we divide ordinary Python syntax into two parts, namely the + import section and the non-import section (assuming a simple case + with imports at the beginning and the rest of the code following), + the variables generated by the import statements are stored in + global variables for subsequent code use. In this context, + the ``global_dict`` represents the global variables required when + executing the non-import code. ``global_dict`` will be filled + during visiting the parsed code. + base_dict (dict): All variables defined in base files. + + Examples: + >>> from mmengine.config import read_base + >>> + >>> + >>> with read_base(): + >>> from .._base_.default_runtime import * + >>> from .._base_.datasets.coco_detection import dataset + + In this case, the base_dict will be: + + Examples: + >>> base_dict = { + >>> '.._base_.default_runtime': ... 
+ >>> '.._base_.datasets.coco_detection': dataset} + + and `global_dict` will be updated like this: + + Examples: + >>> global_dict.update(base_dict['.._base_.default_runtime']) # `import *` means update all data + >>> global_dict.update(dataset=base_dict['.._base_.datasets.coco_detection']['dataset']) # only update `dataset` + """ # noqa: E501 + + def __init__(self, + global_dict: dict, + base_dict: Optional[dict] = None, + filename: Optional[str] = None): + self.base_dict = base_dict if base_dict is not None else {} + self.global_dict = global_dict + # In Windows, the filename could be like this: + # "C:\\Users\\runneradmin\\AppData\\Local\\" + # Although it has been an raw string, ast.parse will firstly escape + # it as the executed code: + # "C:\Users\runneradmin\AppData\Local\\\" + # As you see, the `\U` will be treated as a part of + # the escape sequence during code parsing, leading to an + # parsing error + # Here we use `encode('unicode_escape').decode()` for double escaping + if isinstance(filename, str): + filename = filename.encode('unicode_escape').decode() + self.filename = filename + self.imported_obj: set = set() + super().__init__() + + def visit_ImportFrom( + self, node: ast.ImportFrom + ) -> Optional[Union[List[ast.Assign], ast.ImportFrom]]: + """Hack the ``from ... import ...`` syntax and update the global_dict. + + Examples: + >>> from mmdet.models import RetinaNet + + Will be parsed as: + + Examples: + >>> RetinaNet = lazyObject('mmdet.models', 'RetinaNet') + + ``global_dict`` will also be updated by ``base_dict`` as the + class docstring says. + + Args: + node (ast.AST): The node of the current import statement. + + Returns: + Optional[List[ast.Assign]]: There three cases: + + * If the node is a statement of importing base files. + None will be returned. + * If the node is a statement of importing a builtin module, + node will be directly returned + * Otherwise, it will return the assignment statements of + ``LazyObject``. + """ + # Built-in modules will not be parsed as LazyObject + module = f'{node.level*"."}{node.module}' + if _is_builtin_module(module): + # Make sure builtin module will be added into `self.imported_obj` + for alias in node.names: + if alias.asname is not None: + self.imported_obj.add(alias.asname) + elif alias.name == '*': + raise ConfigParsingError( + 'Cannot import * from non-base config') + else: + self.imported_obj.add(alias.name) + return node + + if module in self.base_dict: + for alias_node in node.names: + if alias_node.name == '*': + self.global_dict.update(self.base_dict[module]) + return None + if alias_node.asname is not None: + base_key = alias_node.asname + else: + base_key = alias_node.name + self.global_dict[base_key] = self.base_dict[module][ + alias_node.name] + return None + + nodes: List[ast.Assign] = [] + for alias_node in node.names: + # `ast.alias` has lineno attr after Python 3.10, + if hasattr(alias_node, 'lineno'): + lineno = alias_node.lineno + else: + lineno = node.lineno + if alias_node.name == '*': + # TODO: If users import * from a non-config module, it should + # fallback to import the real module and raise a warning to + # remind users the real module will be imported which will slow + # down the parsing speed. + raise ConfigParsingError( + 'Illegal syntax in config! 
`from xxx import *` is not ' + 'allowed to appear outside the `if base:` statement') + elif alias_node.asname is not None: + # case1: + # from mmengine.dataset import BaseDataset as Dataset -> + # Dataset = LazyObject('mmengine.dataset', 'BaseDataset') + code = f'{alias_node.asname} = LazyObject("{module}", "{alias_node.name}", "{self.filename}, line {lineno}")' # noqa: E501 + self.imported_obj.add(alias_node.asname) + else: + # case2: + # from mmengine.model import BaseModel + # BaseModel = LazyObject('mmengine.model', 'BaseModel') + code = f'{alias_node.name} = LazyObject("{module}", "{alias_node.name}", "{self.filename}, line {lineno}")' # noqa: E501 + self.imported_obj.add(alias_node.name) + try: + nodes.append(ast.parse(code).body[0]) # type: ignore + except Exception as e: + raise ConfigParsingError( + f'Cannot import {alias_node} from {module}' + '1. Cannot import * from 3rd party lib in the config ' + 'file\n' + '2. Please check if the module is a base config which ' + 'should be added to `_base_`\n') from e + return nodes + + def visit_Import(self, node) -> Union[ast.Assign, ast.Import]: + """Work with ``_gather_abs_import_lazyobj`` to hack the ``import ...`` + syntax. + + Examples: + >>> import mmcls.models + >>> import mmcls.datasets + >>> import mmcls + + Will be parsed as: + + Examples: + >>> # import mmcls.models; import mmcls.datasets; import mmcls + >>> mmcls = lazyObject(['mmcls', 'mmcls.datasets', 'mmcls.models']) + + Args: + node (ast.AST): The node of the current import statement. + + Returns: + ast.Assign: If the import statement is ``import ... as ...``, + ast.Assign will be returned, otherwise node will be directly + returned. + """ + # For absolute import like: `import mmdet.configs as configs`. + # It will be parsed as: + # configs = LazyObject('mmdet.configs') + # For absolute import like: + # `import mmdet.configs` + # `import mmdet.configs.default_runtime` + # This will be parsed as + # mmdet = LazyObject(['mmdet.configs.default_runtime', 'mmdet.configs]) + # However, visit_Import cannot gather other import information, so + # `_gather_abs_import_LazyObject` will gather all import information + # from the same module and construct the LazyObject. + alias_list = node.names + assert len(alias_list) == 1, ( + 'Illegal syntax in config! 
import multiple modules in one line is ' + 'not supported') + # TODO Support multiline import + alias = alias_list[0] + if alias.asname is not None: + self.imported_obj.add(alias.asname) + if _is_builtin_module(alias.name.split('.')[0]): + return node + return ast.parse( # type: ignore + f'{alias.asname} = LazyObject(' + f'"{alias.name}",' + f'location="{self.filename}, line {node.lineno}")').body[0] + return node + + +def _gather_abs_import_lazyobj(tree: ast.Module, + filename: Optional[str] = None): + """Experimental implementation of gathering absolute import information.""" + if isinstance(filename, str): + filename = filename.encode('unicode_escape').decode() + imported = defaultdict(list) + abs_imported = set() + new_body: List[ast.stmt] = [] + # module2node is used to get lineno when Python < 3.10 + module2node: dict = dict() + for node in tree.body: + if isinstance(node, ast.Import): + for alias in node.names: + # Skip converting built-in module to LazyObject + if _is_builtin_module(alias.name): + new_body.append(node) + continue + module = alias.name.split('.')[0] + module2node.setdefault(module, node) + imported[module].append(alias) + continue + new_body.append(node) + + for key, value in imported.items(): + names = [_value.name for _value in value] + if hasattr(value[0], 'lineno'): + lineno = value[0].lineno + else: + lineno = module2node[key].lineno + lazy_module_assign = ast.parse( + f'{key} = LazyObject({names}, location="{filename}, line {lineno}")' # noqa: E501 + ) # noqa: E501 + abs_imported.add(key) + new_body.insert(0, lazy_module_assign.body[0]) + tree.body = new_body + return tree, abs_imported + + +def get_installed_path(package: str) -> str: + """Get installed path of package. + + Args: + package (str): Name of package. + + Example: + >>> get_installed_path('mmcls') + >>> '.../lib/python3.7/site-packages/mmcls' + """ + import importlib.util + + from pkg_resources import DistributionNotFound, get_distribution + + # if the package name is not the same as module name, module name should be + # inferred. For example, mmcv-full is the package name, but mmcv is module + # name. If we want to get the installed path of mmcv-full, we should concat + # the pkg.location and module name + try: + pkg = get_distribution(package) + except DistributionNotFound as e: + # if the package is not installed, package path set in PYTHONPATH + # can be detected by `find_spec` + spec = importlib.util.find_spec(package) + if spec is not None: + if spec.origin is not None: + return osp.dirname(spec.origin) + else: + # `get_installed_path` cannot get the installed path of + # namespace packages + raise RuntimeError( + f'{package} is a namespace package, which is invalid ' + 'for `get_install_path`') + else: + raise e + + possible_path = osp.join(pkg.location, package) # type: ignore + if osp.exists(possible_path): + return possible_path + else: + return osp.join(pkg.location, package2module(package)) # type: ignore + + +def import_modules_from_strings(imports, allow_failed_imports=False): + """Import modules from the given list of strings. + + Args: + imports (list | str | None): The given module names to be imported. + allow_failed_imports (bool): If True, the failed imports will return + None. Otherwise, an ImportError is raise. Defaults to False. + + Returns: + list[module] | module | None: The imported modules. + + Examples: + >>> osp, sys = import_modules_from_strings( + ... 
['os.path', 'sys']) + >>> import os.path as osp_ + >>> import sys as sys_ + >>> assert osp == osp_ + >>> assert sys == sys_ + """ + if not imports: + return + single_import = False + if isinstance(imports, str): + single_import = True + imports = [imports] + if not isinstance(imports, list): + raise TypeError( + f'custom_imports must be a list but got type {type(imports)}') + imported = [] + for imp in imports: + if not isinstance(imp, str): + raise TypeError( + f'{imp} is of type {type(imp)} and cannot be imported.') + try: + imported_tmp = import_module(imp) + except ImportError: + if allow_failed_imports: + warnings.warn(f'{imp} failed to import and is ignored.', + UserWarning) + imported_tmp = None + else: + raise ImportError(f'Failed to import {imp}') + imported.append(imported_tmp) + if single_import: + imported = imported[0] + return imported + + +def import_module(name, package=None): + """Import a module, optionally supporting relative imports.""" + return real_import_module(name, package) + + +def is_installed(package: str) -> bool: + """Check package whether installed. + + Args: + package (str): Name of package to be checked. + """ + # When executing `import mmengine.runner`, + # pkg_resources will be imported and it takes too much time. + # Therefore, import it in function scope to save time. + import importlib.util + import pkg_resources + from pkg_resources import get_distribution + + # refresh the pkg_resources + # more datails at https://github.com/pypa/setuptools/issues/373 + importlib.reload(pkg_resources) + try: + get_distribution(package) + return True + except pkg_resources.DistributionNotFound: + spec = importlib.util.find_spec(package) + if spec is None: + return False + elif spec.origin is not None: + return True + else: + return False + + +def dump(obj, file=None, file_format=None, **kwargs): + """Dump data to json/yaml/pickle strings or files (mmengine-like replacement).""" + if isinstance(file, Path): + file = str(file) + + # Guess file format if not explicitly given + if file_format is None: + if isinstance(file, str): + file_format = file.split('.')[-1].lower() + elif file is None: + raise ValueError("file_format must be specified if file is None") + + if file_format not in ['json', 'yaml', 'yml', 'pkl', 'pickle']: + raise TypeError(f"Unsupported file format: {file_format}") + + # Convert YAML extension + if file_format == 'yml': + file_format = 'yaml' + if file_format == 'pickle': + file_format = 'pkl' + + # Handle output to string + if file is None: + if file_format == 'json': + return json.dumps(obj, indent=4, **kwargs) + elif file_format == 'yaml': + return yaml.dump(obj, **kwargs) + elif file_format == 'pkl': + return pickle.dumps(obj, **kwargs) + + # Handle output to file + mode = 'w' if file_format in ['json', 'yaml'] else 'wb' + with open(file, mode, encoding='utf-8' if 'b' not in mode else None) as f: + if file_format == 'json': + json.dump(obj, f, indent=4, **kwargs) + elif file_format == 'yaml': + yaml.dump(obj, f, **kwargs) + elif file_format == 'pkl': + pickle.dump(obj, f, **kwargs) + + return True + + +def check_file_exist(filename, msg_tmpl='file "{}" does not exist'): + if not osp.isfile(filename): + raise FileNotFoundError(msg_tmpl.format(filename)) \ No newline at end of file diff --git a/segformer_plusplus/configs/segformer_mit_b0.py b/segformer_plusplus/configs/segformer_mit_b0.py new file mode 100644 index 0000000000000000000000000000000000000000..f4eb059010ad97d0360e779cbcca82e3a2b50717 --- /dev/null +++ 
b/segformer_plusplus/configs/segformer_mit_b0.py @@ -0,0 +1,28 @@ +norm_cfg = dict(type='SyncBN', requires_grad=True) +backbone = dict( + type='MixVisionTransformer', + in_channels=3, + embed_dims=32, + num_stages=4, + num_layers=[2, 2, 2, 2], + num_heads=[1, 2, 5, 8], + patch_sizes=[7, 3, 3, 3], + sr_ratios=[8, 4, 2, 1], + out_indices=(0, 1, 2, 3), + mlp_ratio=4, + qkv_bias=True, + drop_rate=0.0, + attn_drop_rate=0.0, + drop_path_rate=0.1 +) +decode_head = dict( + type='SegformerHead', + in_channels=[32, 64, 160, 256], + in_index=[0, 1, 2, 3], + channels=256, + dropout_ratio=0.1, + out_channels=19, + norm_cfg=norm_cfg, + align_corners=False, + interpolate_mode='bilinear' +) diff --git a/segformer_plusplus/configs/segformer_mit_b1.py b/segformer_plusplus/configs/segformer_mit_b1.py new file mode 100644 index 0000000000000000000000000000000000000000..4ec3214aff7f298cf429b836e51e84cea5aeb771 --- /dev/null +++ b/segformer_plusplus/configs/segformer_mit_b1.py @@ -0,0 +1,8 @@ +_base_ = ['./segformer_mit_b0.py'] + +backbone = dict( + embed_dims=64, +) +decode_head = dict( + in_channels=[64, 128, 320, 512] +) diff --git a/segformer_plusplus/configs/segformer_mit_b2.py b/segformer_plusplus/configs/segformer_mit_b2.py new file mode 100644 index 0000000000000000000000000000000000000000..230e1e50daba53993126d58efc9bed642cb9f4ca --- /dev/null +++ b/segformer_plusplus/configs/segformer_mit_b2.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 4, 6, 3] +) diff --git a/segformer_plusplus/configs/segformer_mit_b3.py b/segformer_plusplus/configs/segformer_mit_b3.py new file mode 100644 index 0000000000000000000000000000000000000000..5c877f9d12459c9508a7627a3131144e94768856 --- /dev/null +++ b/segformer_plusplus/configs/segformer_mit_b3.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 4, 18, 3] +) diff --git a/segformer_plusplus/configs/segformer_mit_b4.py b/segformer_plusplus/configs/segformer_mit_b4.py new file mode 100644 index 0000000000000000000000000000000000000000..897e59765578c96a5a9a17ffb8cf6aceecc81e3b --- /dev/null +++ b/segformer_plusplus/configs/segformer_mit_b4.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 8, 27, 3] +) diff --git a/segformer_plusplus/configs/segformer_mit_b5.py b/segformer_plusplus/configs/segformer_mit_b5.py new file mode 100644 index 0000000000000000000000000000000000000000..7f0762237dd35e601e3153ca2e52a89eac365e1d --- /dev/null +++ b/segformer_plusplus/configs/segformer_mit_b5.py @@ -0,0 +1,6 @@ +_base_ = ['./segformer_mit_b1.py'] + +backbone = dict( + embed_dims=64, + num_layers=[3, 6, 40, 3] +) diff --git a/segformer_plusplus/model/__init__.py b/segformer_plusplus/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..b680692d5ff945d85311a8c90c70766275444498 --- /dev/null +++ b/segformer_plusplus/model/__init__.py @@ -0,0 +1 @@ +__all__ = [] \ No newline at end of file diff --git a/segformer_plusplus/model/backbone/__init__.py b/segformer_plusplus/model/backbone/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8fc6fa95ea87f1419517325a0a49915b94996d74 --- /dev/null +++ b/segformer_plusplus/model/backbone/__init__.py @@ -0,0 +1,3 @@ +from .mit import MixVisionTransformer + +__all__ = ['MixVisionTransformer'] \ No newline at end of file diff --git a/segformer_plusplus/model/backbone/mit.py b/segformer_plusplus/model/backbone/mit.py 
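The configs added above (segformer_mit_b0.py through segformer_mit_b5.py) are thin overrides: each of b2–b5 inherits from b1, which inherits from b0, and only embed_dims and num_layers change, while every other field (heads, patch sizes, sr_ratios, the decode head) comes from the base chain. A minimal sketch of how one of these configs resolves, assuming mmengine's standard _base_ merging and that the snippet is run from the repository root:

from mmengine.config import Config

# Resolve the b2 variant; _base_ chains b2 -> b1 -> b0 and merges the dicts key by key.
cfg = Config.fromfile('segformer_plusplus/configs/segformer_mit_b2.py')

print(cfg.backbone.num_layers)      # [3, 4, 6, 3]        (set in the b2 file)
print(cfg.backbone.num_heads)       # [1, 2, 5, 8]        (inherited from b0)
print(cfg.decode_head.in_channels)  # [64, 128, 320, 512] (inherited from b1)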
new file mode 100644 index 0000000000000000000000000000000000000000..3dabcde1b48b834a1721a136ddbee1d0a56b8266 --- /dev/null +++ b/segformer_plusplus/model/backbone/mit.py @@ -0,0 +1,477 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +import torch +import torch.nn as nn +import torch.utils.checkpoint as cp +from tomesd.merge import bipartite_soft_matching_random2d + +from ...utils import PatchEmbed +from ...utils import nchw_to_nlc, nlc_to_nchw +from ...utils import MODELS +from ...utils import Conv2d, build_activation_layer, build_norm_layer, build_dropout +from ..base_module import BaseModule, MultiheadAttention, ModuleList, Sequential +from ..weight_init import (constant_init, normal_init, + trunc_normal_init) + + +class MixFFN(BaseModule): + """An implementation of MixFFN of Segformer. + + The differences between MixFFN & FFN: + 1. Use 1X1 Conv to replace Linear layer. + 2. Introduce 3X3 Conv to encode positional information. + Args: + embed_dims (int): The feature dimension. Same as + `MultiheadAttention`. Defaults: 256. + feedforward_channels (int): The hidden dimension of FFNs. + Defaults: 1024. + act_cfg (dict, optional): The activation config for FFNs. + Default: dict(type='ReLU') + ffn_drop (float, optional): Probability of an element to be + zeroed in FFN. Default 0.0. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. + """ + + def __init__(self, + embed_dims, + feedforward_channels, + act_cfg=dict(type='GELU'), + ffn_drop=0., + dropout_layer=None, + init_cfg=None): + super().__init__(init_cfg) + + self.embed_dims = embed_dims + self.feedforward_channels = feedforward_channels + self.act_cfg = act_cfg + self.activate = build_activation_layer(act_cfg) + + in_channels = embed_dims + fc1 = Conv2d( + in_channels=in_channels, + out_channels=feedforward_channels, + kernel_size=1, + stride=1, + bias=True) + # 3x3 depth wise conv to provide positional encode information + pe_conv = Conv2d( + in_channels=feedforward_channels, + out_channels=feedforward_channels, + kernel_size=3, + stride=1, + padding=(3 - 1) // 2, + bias=True, + groups=feedforward_channels) + fc2 = Conv2d( + in_channels=feedforward_channels, + out_channels=in_channels, + kernel_size=1, + stride=1, + bias=True) + drop = nn.Dropout(ffn_drop) + layers = [fc1, pe_conv, self.activate, drop, fc2, drop] + self.layers = Sequential(*layers) + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else torch.nn.Identity() + + def forward(self, x, hw_shape, identity=None): + out = nlc_to_nchw(x, hw_shape) + out = self.layers(out) + out = nchw_to_nlc(out) + if identity is None: + identity = x + return identity + self.dropout_layer(out) + + +class EfficientMultiheadAttention(MultiheadAttention): + """An implementation of Efficient Multi-head Attention of Segformer. + + This module is modified from MultiheadAttention which is a module from + mmcv.cnn.bricks.transformer. + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + attn_drop (float): A Dropout layer on attn_output_weights. + Default: 0.0. + proj_drop (float): A Dropout layer after `nn.MultiheadAttention`. + Default: 0.0. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. Default: None. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. 
+ batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) + or (n, batch, embed_dim). Default: False. + qkv_bias (bool): enable bias for qkv if True. Default True. + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + sr_ratio (int): The ratio of spatial reduction of Efficient Multi-head + Attention of Segformer. Default: 1. + """ + + def __init__(self, + embed_dims, + num_heads, + attn_drop=0., + proj_drop=0., + dropout_layer=None, + init_cfg=None, + batch_first=True, + qkv_bias=False, + tome_cfg=dict(), + norm_cfg=dict(type='LN'), + sr_ratio=1): + super().__init__( + embed_dims, + num_heads, + attn_drop, + proj_drop, + dropout_layer=dropout_layer, + init_cfg=init_cfg, + batch_first=batch_first, + bias=qkv_bias) + + self.q_mode = tome_cfg.get('q_mode') + self.kv_mode = tome_cfg.get('kv_mode') + self.tome_cfg = tome_cfg + + self.sr_ratio = sr_ratio + if sr_ratio > 1: + self.sr = Conv2d( + in_channels=embed_dims, + out_channels=embed_dims, + kernel_size=sr_ratio, + stride=sr_ratio) + # The ret[0] of build_norm_layer is norm name. + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + + def forward(self, x, hw_shape, identity=None): + x_q = x + + if self.sr_ratio > 1: + x_kv = nlc_to_nchw(x, hw_shape) + x_kv = self.sr(x_kv) + x_kv = nchw_to_nlc(x_kv) + x_kv = self.norm(x_kv) + else: + x_kv = x + + # 2D Neighbour Merging KV + if self.kv_mode == 'n2d': + kv_hw_shape = (int(hw_shape[0] / self.sr_ratio), int(hw_shape[1] / self.sr_ratio)) + x_kv = nlc_to_nchw(x_kv, kv_hw_shape) + x_kv = torch.nn.functional.avg_pool2d(x_kv, kernel_size=self.tome_cfg['kv_s'], + stride=self.tome_cfg['kv_s'], + ceil_mode=True) + x_kv = nchw_to_nlc(x_kv) + + # Bipartite Soft Matching (tomesd) KV + if self.kv_mode == 'bsm': + w_kv = int(hw_shape[1] / self.sr_ratio) + h_kv = int(hw_shape[0] / self.sr_ratio) + merge, unmerge = bipartite_soft_matching_random2d(metric=x_kv, w=w_kv, h=h_kv, + r=int(x_kv.size()[1] * self.tome_cfg['kv_r']), + sx=self.tome_cfg['kv_sx'], sy=self.tome_cfg['kv_sy'], + no_rand=True) + x_kv = merge(x_kv) + + if identity is None: + identity = x_q + + # 1D Neighbor Merging Q + if self.q_mode == 'n1d': + x_q = x_q.transpose(-2, -1) + x_q = torch.nn.functional.avg_pool1d(x_q, kernel_size=self.tome_cfg['q_s'], + stride=self.tome_cfg['q_s'], + ceil_mode=True) + x_q = x_q.transpose(-2, -1) + + # 2D Neighbor Merging Q + if self.q_mode == 'n2d': + reduced_hw = (int(torch.ceil(torch.tensor(hw_shape[0] / self.tome_cfg['q_s'][0]))), + int(torch.ceil(torch.tensor(hw_shape[1] / self.tome_cfg['q_s'][1])))) + x_q = nlc_to_nchw(x_q, hw_shape) + x_q = torch.nn.functional.avg_pool2d(x_q, kernel_size=self.tome_cfg['q_s'], + stride=self.tome_cfg['q_s'], + ceil_mode=True) + x_q = nchw_to_nlc(x_q) + + # Bipartite Soft Matching (tomesd) Q + if self.q_mode == 'bsm': + merge, unmerge = bipartite_soft_matching_random2d(metric=x_q, w=hw_shape[1], h=hw_shape[0], + r=int(x_q.size()[1] * self.tome_cfg['q_r']), + sx=self.tome_cfg['q_sx'], sy=self.tome_cfg['q_sy'], + no_rand=True) + x_q = merge(x_q) + + # Because the dataflow('key', 'query', 'value') of + # ``torch.nn.MultiheadAttention`` is (num_query, batch, + # embed_dims), We should adjust the shape of dataflow from + # batch_first (batch, num_query, embed_dims) to num_query_first + # (num_query ,batch, embed_dims), and recover ``attn_output`` + # from num_query_first to batch_first. 
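        # Illustrative numbers (an assumed setting, not prescribed by any config here):
        # with hw_shape=(128, 128), sr_ratio=2, q_mode='n2d' (q_s=(2, 2)) and
        # kv_mode='bsm' (kv_r=0.5), the tensors handed to self.attn below are
        #   x_q:  [B, 16384, C] --avg_pool2d--> [B, 4096, C]
        #   x_kv: [B, 16384, C] --sr conv--> [B, 4096, C] --bsm merge--> [B, 2048, C]
        # so the quadratic attention cost shrinks with both reductions.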
+ + if self.batch_first: + x_q = x_q.transpose(0, 1) + x_kv = x_kv.transpose(0, 1) + out = self.attn(query=x_q, key=x_kv, value=x_kv)[0] + if self.batch_first: + out = out.transpose(0, 1) + + # Unmerging BSM (tome+tomesd) + if self.q_mode == 'bsm': + out = unmerge(out) + + # Unmerging 1D Neighbour Merging + if self.q_mode == 'n1d': + out = out.transpose(-2, -1) + out = torch.nn.functional.interpolate(out, size=identity.size()[-2]) + out = out.transpose(-2, -1) + + # Unmerging 2D Neighbor Merging + if self.q_mode == 'n2d': + out = nlc_to_nchw(out, reduced_hw) + out = torch.nn.functional.interpolate(out, size=hw_shape) + out = nchw_to_nlc(out) + + return identity + self.dropout_layer(self.proj_drop(out)) + + +class TransformerEncoderLayer(BaseModule): + """Implements one encoder layer in Segformer. + + Args: + embed_dims (int): The feature dimension. + num_heads (int): Parallel attention heads. + feedforward_channels (int): The hidden dimension for FFNs. + drop_rate (float): Probability of an element to be zeroed. + after the feed forward layer. Default 0.0. + attn_drop_rate (float): The drop out rate for attention layer. + Default 0.0. + drop_path_rate (float): stochastic depth rate. Default 0.0. + qkv_bias (bool): enable bias for qkv if True. + Default: True. + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN'). + batch_first (bool): Key, Query and Value are shape of + (batch, n, embed_dim) + or (n, batch, embed_dim). Default: False. + init_cfg (dict, optional): Initialization config dict. + Default:None. + sr_ratio (int): The ratio of spatial reduction of Efficient Multi-head + Attention of Segformer. Default: 1. + with_cp (bool): Use checkpoint or not. Using checkpoint will save + some memory while slowing down the training speed. Default: False. + """ + + def __init__(self, + embed_dims, + num_heads, + feedforward_channels, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + qkv_bias=True, + tome_cfg=dict(), + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN'), + batch_first=True, + sr_ratio=1, + with_cp=False): + super().__init__() + + # The ret[0] of build_norm_layer is norm name. + self.norm1 = build_norm_layer(norm_cfg, embed_dims)[1] + + self.attn = EfficientMultiheadAttention( + embed_dims=embed_dims, + num_heads=num_heads, + attn_drop=attn_drop_rate, + proj_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + batch_first=batch_first, + qkv_bias=qkv_bias, + tome_cfg=tome_cfg, + norm_cfg=norm_cfg, + sr_ratio=sr_ratio) + + # The ret[0] of build_norm_layer is norm name. + self.norm2 = build_norm_layer(norm_cfg, embed_dims)[1] + + self.ffn = MixFFN( + embed_dims=embed_dims, + feedforward_channels=feedforward_channels, + ffn_drop=drop_rate, + dropout_layer=dict(type='DropPath', drop_prob=drop_path_rate), + act_cfg=act_cfg) + + self.with_cp = with_cp + + def forward(self, x, hw_shape): + + def _inner_forward(x): + x = self.attn(self.norm1(x), hw_shape, identity=x) + x = self.ffn(self.norm2(x), hw_shape, identity=x) + return x + + if self.with_cp and x.requires_grad: + x = cp.checkpoint(_inner_forward, x) + else: + x = _inner_forward(x) + return x + + +@MODELS.register_module() +class MixVisionTransformer(BaseModule): + """The backbone of Segformer. + + This backbone is the implementation of `SegFormer: Simple and + Efficient Design for Semantic Segmentation with + Transformers `_. + Args: + in_channels (int): Number of input channels. 
Default: 3. + embed_dims (int): Embedding dimension. Default: 768. + num_stags (int): The num of stages. Default: 4. + num_layers (Sequence[int]): The layer number of each transformer encode + layer. Default: [3, 4, 6, 3]. + num_heads (Sequence[int]): The attention heads of each transformer + encode layer. Default: [1, 2, 4, 8]. + patch_sizes (Sequence[int]): The patch_size of each overlapped patch + embedding. Default: [7, 3, 3, 3]. + strides (Sequence[int]): The stride of each overlapped patch embedding. + Default: [4, 2, 2, 2]. + sr_ratios (Sequence[int]): The spatial reduction rate of each + transformer encode layer. Default: [8, 4, 2, 1]. + out_indices (Sequence[int] | int): Output from which stages. + Default: (0, 1, 2, 3). + mlp_ratio (int): ratio of mlp hidden dim to embedding dim. + Default: 4. + qkv_bias (bool): Enable bias for qkv if True. Default: True. + drop_rate (float): Probability of an element to be zeroed. + Default 0.0 + attn_drop_rate (float): The drop out rate for attention layer. + Default 0.0 + drop_path_rate (float): stochastic depth rate. Default 0.0 + norm_cfg (dict): Config dict for normalization layer. + Default: dict(type='LN') + act_cfg (dict): The activation config for FFNs. + Default: dict(type='GELU'). + pretrained (str, optional): model pretrained path. Default: None. + init_cfg (dict or list[dict], optional): Initialization config dict. + Default: None. + with_cp (bool): Use checkpoint or not. Using checkpoint will save + some memory while slowing down the training speed. Default: False. + """ + + def __init__(self, + in_channels=3, + embed_dims=64, + num_stages=4, + num_layers=[3, 4, 6, 3], + num_heads=[1, 2, 4, 8], + patch_sizes=[7, 3, 3, 3], + strides=[4, 2, 2, 2], + sr_ratios=[8, 4, 2, 1], + out_indices=(0, 1, 2, 3), + mlp_ratio=4, + qkv_bias=True, + drop_rate=0., + attn_drop_rate=0., + drop_path_rate=0., + tome_cfg=[dict(), dict(), dict(), dict()], + act_cfg=dict(type='GELU'), + norm_cfg=dict(type='LN', eps=1e-6), + init_cfg=None, + with_cp=False, + down_sample=False): + super().__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + self.num_stages = num_stages + self.num_layers = num_layers + self.num_heads = num_heads + self.patch_sizes = patch_sizes + self.strides = strides + self.sr_ratios = sr_ratios + self.with_cp = with_cp + self.down_sample = down_sample + assert num_stages == len(num_layers) == len(num_heads) \ + == len(patch_sizes) == len(strides) == len(sr_ratios) + + self.out_indices = out_indices + assert max(out_indices) < self.num_stages + + # transformer encoder + dpr = [ + x.item() + for x in torch.linspace(0, drop_path_rate, sum(num_layers)) + ] # stochastic num_layer decay rule + + cur = 0 + self.layers = ModuleList() + for i, num_layer in enumerate(num_layers): + embed_dims_i = embed_dims * num_heads[i] + patch_embed = PatchEmbed( + in_channels=in_channels, + embed_dims=embed_dims_i, + kernel_size=patch_sizes[i], + stride=strides[i], + padding=patch_sizes[i] // 2, + norm_cfg=norm_cfg) + layer = ModuleList([ + TransformerEncoderLayer( + embed_dims=embed_dims_i, + num_heads=num_heads[i], + feedforward_channels=mlp_ratio * embed_dims_i, + drop_rate=drop_rate, + attn_drop_rate=attn_drop_rate, + drop_path_rate=dpr[cur + idx], + qkv_bias=qkv_bias, + tome_cfg=tome_cfg[i], + act_cfg=act_cfg, + norm_cfg=norm_cfg, + with_cp=with_cp, + sr_ratio=sr_ratios[i]) for idx in range(num_layer) + ]) + in_channels = embed_dims_i + # The ret[0] of build_norm_layer is norm name. 
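            # Each stage is normalized over embed_dims_i = embed_dims * num_heads[i];
            # e.g. the b0 config (embed_dims=32, num_heads=[1, 2, 5, 8]) yields stage
            # widths 32, 64, 160, 256, matching the decode head's in_channels.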
+ norm = build_norm_layer(norm_cfg, embed_dims_i)[1] + self.layers.append(ModuleList([patch_embed, layer, norm])) + cur += num_layer + + def init_weights(self): + if self.init_cfg is None: + for m in self.modules(): + if isinstance(m, nn.Linear): + trunc_normal_init(m, std=.02, bias=0.) + elif isinstance(m, nn.LayerNorm): + constant_init(m, val=1.0, bias=0.) + elif isinstance(m, nn.Conv2d): + fan_out = m.kernel_size[0] * m.kernel_size[ + 1] * m.out_channels + fan_out //= m.groups + normal_init( + m, mean=0, std=math.sqrt(2.0 / fan_out), bias=0) + else: + super().init_weights() + + def forward(self, x): + if self.down_sample: + x = torch.nn.functional.interpolate(x, scale_factor=(0.5, 0.5)) + outs = [] + + for i, layer in enumerate(self.layers): + x, hw_shape = layer[0](x) + for block in layer[1]: + x = block(x, hw_shape) + x = layer[2](x) + x = nlc_to_nchw(x, hw_shape) + if i in self.out_indices: + outs.append(x) + + return outs diff --git a/segformer_plusplus/model/base_module.py b/segformer_plusplus/model/base_module.py new file mode 100644 index 0000000000000000000000000000000000000000..d9948e6a1f89808c85935544187e49e7f95de7f5 --- /dev/null +++ b/segformer_plusplus/model/base_module.py @@ -0,0 +1,390 @@ +import copy +from abc import ABCMeta +from collections import defaultdict +from typing import Iterable, List, Optional, Union, Callable +import warnings +from inspect import getfullargspec +import functools +import torch.nn as nn + +from .utils import is_model_wrapper +from .weight_init import PretrainedInit, initialize, update_init_info +from ..utils.activation import build_dropout +from ..utils.registry import MODELS + + +class BaseModule(nn.Module, metaclass=ABCMeta): + """Base module for all modules in openmmlab. ``BaseModule`` is a wrapper of + ``torch.nn.Module`` with additional functionality of parameter + initialization. Compared with ``torch.nn.Module``, ``BaseModule`` mainly + adds three attributes. + + - ``init_cfg``: the config to control the initialization. + - ``init_weights``: The function of parameter initialization and recording + initialization information. + - ``_params_init_info``: Used to track the parameter initialization + information. This attribute only exists during executing the + ``init_weights``. + + Note: + :obj:`PretrainedInit` has a higher priority than any other + initializer. The loaded pretrained weights will overwrite + the previous initialized weights. + + Args: + init_cfg (dict or List[dict], optional): Initialization config dict. + """ + + def __init__(self, init_cfg: Union[dict, List[dict], None] = None): + """Initialize BaseModule, inherited from `torch.nn.Module`""" + + # NOTE init_cfg can be defined in different levels, but init_cfg + # in low levels has a higher priority. 
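        # A typical value is a single dict such as
        # init_cfg=dict(type='Pretrained', checkpoint='...'); init_weights() below
        # resolves it through the WEIGHT_INITIALIZERS registry in weight_init.py,
        # applying 'Pretrained' entries last, as the Note in the class docstring says.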
+ + super().__init__() + # define default value of init_cfg instead of hard code + # in init_weights() function + self._is_init = False + + self.init_cfg = copy.deepcopy(init_cfg) + + # Backward compatibility in derived classes + # if pretrained is not None: + # warnings.warn('DeprecationWarning: pretrained is a deprecated \ + # key, please consider using init_cfg') + # self.init_cfg = dict(type='Pretrained', checkpoint=pretrained) + + @property + def is_init(self): + return self._is_init + + @is_init.setter + def is_init(self, value): + self._is_init = value + + def init_weights(self): + """Initialize the weights.""" + + is_top_level_module = False + # check if it is top-level module + if not hasattr(self, '_params_init_info'): + # The `_params_init_info` is used to record the initialization + # information of the parameters + # the key should be the obj:`nn.Parameter` of model and the value + # should be a dict containing + # - init_info (str): The string that describes the initialization. + # - tmp_mean_value (FloatTensor): The mean of the parameter, + # which indicates whether the parameter has been modified. + # this attribute would be deleted after all parameters + # is initialized. + self._params_init_info = defaultdict(dict) + is_top_level_module = True + + # Initialize the `_params_init_info`, + # When detecting the `tmp_mean_value` of + # the corresponding parameter is changed, update related + # initialization information + for name, param in self.named_parameters(): + self._params_init_info[param][ + 'init_info'] = f'The value is the same before and ' \ + f'after calling `init_weights` ' \ + f'of {self.__class__.__name__} ' + self._params_init_info[param][ + 'tmp_mean_value'] = param.data.mean().cpu() + + # pass `params_init_info` to all submodules + # All submodules share the same `params_init_info`, + # so it will be updated when parameters are + # modified at any level of the model. + for sub_module in self.modules(): + sub_module._params_init_info = self._params_init_info + + module_name = self.__class__.__name__ + if not self._is_init: + if self.init_cfg: + + init_cfgs = self.init_cfg + if isinstance(self.init_cfg, dict): + init_cfgs = [self.init_cfg] + + # PretrainedInit has higher priority than any other init_cfg. + # Therefore we initialize `pretrained_cfg` last to overwrite + # the previous initialized weights. 
+ # See details in https://github.com/open-mmlab/mmengine/issues/691 # noqa E501 + other_cfgs = [] + pretrained_cfg = [] + for init_cfg in init_cfgs: + assert isinstance(init_cfg, dict) + if (init_cfg['type'] == 'Pretrained' + or init_cfg['type'] is PretrainedInit): + pretrained_cfg.append(init_cfg) + else: + other_cfgs.append(init_cfg) + + initialize(self, other_cfgs) + + for m in self.children(): + if is_model_wrapper(m) and not hasattr(m, 'init_weights'): + m = m.module + if hasattr(m, 'init_weights') and not getattr( + m, 'is_init', False): + m.init_weights() + # users may overload the `init_weights` + update_init_info( + m, + init_info=f'Initialized by ' + f'user-defined `init_weights`' + f' in {m.__class__.__name__} ') + if self.init_cfg and pretrained_cfg: + initialize(self, pretrained_cfg) + self._is_init = True + + if is_top_level_module: + self._dump_init_info() + + for sub_module in self.modules(): + del sub_module._params_init_info + + def __repr__(self): + s = super().__repr__() + if self.init_cfg: + s += f'\ninit_cfg={self.init_cfg}' + return s + + +def deprecated_api_warning(name_dict: dict, + cls_name: Optional[str] = None) -> Callable: + """A decorator to check if some arguments are deprecate and try to replace + deprecate src_arg_name to dst_arg_name. + + Args: + name_dict(dict): + key (str): Deprecate argument names. + val (str): Expected argument names. + + Returns: + func: New function. + """ + + def api_warning_wrapper(old_func): + + @functools.wraps(old_func) + def new_func(*args, **kwargs): + # get the arg spec of the decorated method + args_info = getfullargspec(old_func) + # get name of the function + func_name = old_func.__name__ + if cls_name is not None: + func_name = f'{cls_name}.{func_name}' + if args: + arg_names = args_info.args[:len(args)] + for src_arg_name, dst_arg_name in name_dict.items(): + if src_arg_name in arg_names: + warnings.warn( + f'"{src_arg_name}" is deprecated in ' + f'`{func_name}`, please use "{dst_arg_name}" ' + 'instead', DeprecationWarning) + arg_names[arg_names.index(src_arg_name)] = dst_arg_name + if kwargs: + for src_arg_name, dst_arg_name in name_dict.items(): + if src_arg_name in kwargs: + assert dst_arg_name not in kwargs, ( + f'The expected behavior is to replace ' + f'the deprecated key `{src_arg_name}` to ' + f'new key `{dst_arg_name}`, but got them ' + f'in the arguments at the same time, which ' + f'is confusing. `{src_arg_name} will be ' + f'deprecated in the future, please ' + f'use `{dst_arg_name}` instead.') + + warnings.warn( + f'"{src_arg_name}" is deprecated in ' + f'`{func_name}`, please use "{dst_arg_name}" ' + 'instead', DeprecationWarning) + kwargs[dst_arg_name] = kwargs.pop(src_arg_name) + + # apply converted arguments to the decorated method + output = old_func(*args, **kwargs) + return output + + return new_func + + return api_warning_wrapper + + +@MODELS.register_module() +class MultiheadAttention(BaseModule): + """A wrapper for ``torch.nn.MultiheadAttention``. + + This module implements MultiheadAttention with identity connection, + and positional encoding is also passed as input. + + Args: + embed_dims (int): The embedding dimension. + num_heads (int): Parallel attention heads. + attn_drop (float): A Dropout layer on attn_output_weights. + Default: 0.0. + proj_drop (float): A Dropout layer after `nn.MultiheadAttention`. + Default: 0.0. + dropout_layer (obj:`ConfigDict`): The dropout_layer used + when adding the shortcut. + init_cfg (obj:`mmcv.ConfigDict`): The Config for initialization. + Default: None. 
+ batch_first (bool): When it is True, Key, Query and Value are shape of + (batch, n, embed_dim), otherwise (n, batch, embed_dim). + Default to False. + """ + + def __init__(self, + embed_dims, + num_heads, + attn_drop=0., + proj_drop=0., + dropout_layer=dict(type='Dropout', drop_prob=0.), + init_cfg=None, + batch_first=False, + **kwargs): + super().__init__(init_cfg) + if 'dropout' in kwargs: + warnings.warn( + 'The arguments `dropout` in MultiheadAttention ' + 'has been deprecated, now you can separately ' + 'set `attn_drop`(float), proj_drop(float), ' + 'and `dropout_layer`(dict) ', DeprecationWarning) + attn_drop = kwargs['dropout'] + dropout_layer['drop_prob'] = kwargs.pop('dropout') + + self.embed_dims = embed_dims + self.num_heads = num_heads + self.batch_first = batch_first + + self.attn = nn.MultiheadAttention(embed_dims, num_heads, attn_drop, + **kwargs) + + self.proj_drop = nn.Dropout(proj_drop) + self.dropout_layer = build_dropout( + dropout_layer) if dropout_layer else nn.Identity() + + @deprecated_api_warning({'residual': 'identity'}, + cls_name='MultiheadAttention') + def forward(self, + query, + key=None, + value=None, + identity=None, + query_pos=None, + key_pos=None, + attn_mask=None, + key_padding_mask=None, + **kwargs): + """Forward function for `MultiheadAttention`. + + **kwargs allow passing a more general data flow when combining + with other operations in `transformerlayer`. + + Args: + query (Tensor): The input query with shape [num_queries, bs, + embed_dims] if self.batch_first is False, else + [bs, num_queries embed_dims]. + key (Tensor): The key tensor with shape [num_keys, bs, + embed_dims] if self.batch_first is False, else + [bs, num_keys, embed_dims] . + If None, the ``query`` will be used. Defaults to None. + value (Tensor): The value tensor with same shape as `key`. + Same in `nn.MultiheadAttention.forward`. Defaults to None. + If None, the `key` will be used. + identity (Tensor): This tensor, with the same shape as x, + will be used for the identity link. + If None, `x` will be used. Defaults to None. + query_pos (Tensor): The positional encoding for query, with + the same shape as `x`. If not None, it will + be added to `x` before forward function. Defaults to None. + key_pos (Tensor): The positional encoding for `key`, with the + same shape as `key`. Defaults to None. If not None, it will + be added to `key` before forward function. If None, and + `query_pos` has the same shape as `key`, then `query_pos` + will be used for `key_pos`. Defaults to None. + attn_mask (Tensor): ByteTensor mask with shape [num_queries, + num_keys]. Same in `nn.MultiheadAttention.forward`. + Defaults to None. + key_padding_mask (Tensor): ByteTensor with shape [bs, num_keys]. + Defaults to None. + + Returns: + Tensor: forwarded results with shape + [num_queries, bs, embed_dims] + if self.batch_first is False, else + [bs, num_queries embed_dims]. 
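        Examples:
            >>> import torch
            >>> # dropout_layer=None keeps the example independent of the
            >>> # registry-built dropout layer.
            >>> attn = MultiheadAttention(embed_dims=64, num_heads=4,
            ...                           batch_first=True, dropout_layer=None)
            >>> x = torch.rand(2, 100, 64)   # [bs, num_queries, embed_dims]
            >>> attn(query=x).shape          # key/value/identity default to query
            torch.Size([2, 100, 64])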
+ """ + + if key is None: + key = query + if value is None: + value = key + if identity is None: + identity = query + if key_pos is None: + if query_pos is not None: + # use query_pos if key_pos is not available + if query_pos.shape == key.shape: + key_pos = query_pos + if query_pos is not None: + query = query + query_pos + if key_pos is not None: + key = key + key_pos + + # Because the dataflow('key', 'query', 'value') of + # ``torch.nn.MultiheadAttention`` is (num_query, batch, + # embed_dims), We should adjust the shape of dataflow from + # batch_first (batch, num_query, embed_dims) to num_query_first + # (num_query ,batch, embed_dims), and recover ``attn_output`` + # from num_query_first to batch_first. + if self.batch_first: + query = query.transpose(0, 1) + key = key.transpose(0, 1) + value = value.transpose(0, 1) + + out = self.attn( + query=query, + key=key, + value=value, + attn_mask=attn_mask, + key_padding_mask=key_padding_mask)[0] + + if self.batch_first: + out = out.transpose(0, 1) + + return identity + self.dropout_layer(self.proj_drop(out)) + + +class ModuleList(BaseModule, nn.ModuleList): + """ModuleList in openmmlab. + + Ensures that all modules in ``ModuleList`` have a different initialization + strategy than the outer model + + Args: + modules (iterable, optional): An iterable of modules to add. + init_cfg (dict, optional): Initialization config dict. + """ + + def __init__(self, + modules: Optional[Iterable] = None, + init_cfg: Optional[dict] = None): + BaseModule.__init__(self, init_cfg) + nn.ModuleList.__init__(self, modules) + + +class Sequential(BaseModule, nn.Sequential): + """Sequential module in openmmlab. + + Ensures that all modules in ``Sequential`` have a different initialization + strategy than the outer model + + Args: + init_cfg (dict, optional): Initialization config dict. + """ + + def __init__(self, *args, init_cfg: Optional[dict] = None): + BaseModule.__init__(self, init_cfg) + nn.Sequential.__init__(self, *args) \ No newline at end of file diff --git a/segformer_plusplus/model/head/__init__.py b/segformer_plusplus/model/head/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..939f5552815b4e712af13103e34e72469f240ec9 --- /dev/null +++ b/segformer_plusplus/model/head/__init__.py @@ -0,0 +1,3 @@ +from .segformer_head import SegformerHead + +__all__ = ['SegformerHead'] diff --git a/segformer_plusplus/model/head/segformer_head.py b/segformer_plusplus/model/head/segformer_head.py new file mode 100644 index 0000000000000000000000000000000000000000..6729bc6ab9784f7a2916ac91f233debba5000827 --- /dev/null +++ b/segformer_plusplus/model/head/segformer_head.py @@ -0,0 +1,95 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch +import torch.nn as nn + +from ...utils import MODELS +from ...utils import resize +from ..base_module import BaseModule +from ...utils.activation import ConvModule + + +@MODELS.register_module() +class SegformerHead(BaseModule): + """The all mlp Head of segformer. + + This head is the implementation of + `Segformer ` _. + + Args: + interpolate_mode: The interpolate mode of MLP head upsample operation. + Default: 'bilinear'. 
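        in_channels (list[int]): Number of channels of each input feature map.
            Default: [32, 64, 160, 256].
        in_index (list[int]): Indices of the input feature maps to use.
            Default: [0, 1, 2, 3].
        channels (int): Number of channels of the per-scale 1x1 convs and the
            fusion conv. Default: 256.
        dropout_ratio (float): Dropout ratio applied before the final
            classification conv. Default: 0.1.
        out_channels (int): Number of output (class) channels. Default: 19.
        norm_cfg (dict, optional): Config dict for the normalization layers of
            the ConvModules. Default: None.
        align_corners (bool): align_corners argument used when resizing the
            stage features. Default: False.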
+ """ + + def __init__(self, + in_channels=[32, 64, 160, 256], + in_index=[0, 1, 2, 3], + channels=256, + dropout_ratio=0.1, + out_channels=19, + norm_cfg=None, + align_corners=False, + interpolate_mode='bilinear'): + super().__init__() + + self.in_channels = in_channels + self.in_index = in_index + self.channels = channels + self.dropout_ratio = dropout_ratio + self.out_channels = out_channels + self.norm_cfg = norm_cfg + self.align_corners = align_corners + self.interpolate_mode = interpolate_mode + + self.act_cfg = dict(type='ReLU') + self.conv_seg = nn.Conv2d(channels, self.out_channels, kernel_size=1) + if dropout_ratio > 0: + self.dropout = nn.Dropout2d(dropout_ratio) + else: + self.dropout = None + + num_inputs = len(self.in_channels) + + assert num_inputs == len(self.in_index) + + self.convs = nn.ModuleList() + for i in range(num_inputs): + self.convs.append( + ConvModule( + in_channels=self.in_channels[i], + out_channels=self.channels, + kernel_size=1, + stride=1, + norm_cfg=self.norm_cfg, + act_cfg=self.act_cfg)) + + self.fusion_conv = ConvModule( + in_channels=self.channels * num_inputs, + out_channels=self.channels, + kernel_size=1, + norm_cfg=self.norm_cfg) + + def cls_seg(self, feat): + """Classify each pixel.""" + if self.dropout is not None: + feat = self.dropout(feat) + output = self.conv_seg(feat) + return output + + def forward(self, inputs): + # Receive 4 stage backbone feature map: 1/4, 1/8, 1/16, 1/32 + outs = [] + for idx in range(len(inputs)): + x = inputs[idx] + conv = self.convs[idx] + outs.append( + resize( + input=conv(x), + size=inputs[0].shape[2:], + mode=self.interpolate_mode, + align_corners=self.align_corners)) + + out = self.fusion_conv(torch.cat(outs, dim=1)) + + out = self.cls_seg(out) + + return out diff --git a/segformer_plusplus/model/utils.py b/segformer_plusplus/model/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..10e071a71bd0e8565ed85e8ff215842bbbe12e30 --- /dev/null +++ b/segformer_plusplus/model/utils.py @@ -0,0 +1,27 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import torch.nn as nn + +from ..utils.registry import Registry + + +MODEL_WRAPPERS = Registry('model_wrapper') + +def is_model_wrapper(model: nn.Module, registry: Registry = MODEL_WRAPPERS): + """Check if a module is a model wrapper. + + Args: + model (nn.Module): The model to be checked. + registry (Registry): The parent registry to search for model wrappers. + + Returns: + bool: True if the input model is a model wrapper. + """ + module_wrappers = tuple(registry.module_dict.values()) + if isinstance(model, module_wrappers): + return True + + if not registry.children: + return False + + return any( + is_model_wrapper(model, child) for child in registry.children.values()) diff --git a/segformer_plusplus/model/weight_init.py b/segformer_plusplus/model/weight_init.py new file mode 100644 index 0000000000000000000000000000000000000000..53b438be5fabd0fcdd122960fdd2ae7248fb76c4 --- /dev/null +++ b/segformer_plusplus/model/weight_init.py @@ -0,0 +1,413 @@ + +import copy +import math +import warnings +import inspect +from typing import Any, Optional, Union +import torch +import torch.nn as nn +from torch import Tensor + +from ..configs.config.config import Config, ConfigDict +from ..utils.registry import Registry +from ..utils.manager import ManagerMixin + + +WEIGHT_INITIALIZERS = Registry('weight initializer') + +@WEIGHT_INITIALIZERS.register_module(name='Pretrained') +class PretrainedInit: + """Initialize module by loading a pretrained model. 
+ + Args: + checkpoint (str): the checkpoint file of the pretrained model should + be load. + prefix (str, optional): the prefix of a sub-module in the pretrained + model. it is for loading a part of the pretrained model to + initialize. For example, if we would like to only load the + backbone of a detector model, we can set ``prefix='backbone.'``. + Defaults to None. + map_location (str): map tensors into proper locations. Defaults to cpu. + """ + + def __init__(self, checkpoint, prefix=None, map_location='cpu'): + self.checkpoint = checkpoint + self.prefix = prefix + self.map_location = map_location + + def __call__(self, module): + from mmengine.runner.checkpoint import (_load_checkpoint_with_prefix, + load_checkpoint, + load_state_dict) + if self.prefix is None: + load_checkpoint( + module, + self.checkpoint, + map_location=self.map_location, + strict=False, + logger='current') + else: + state_dict = _load_checkpoint_with_prefix( + self.prefix, self.checkpoint, map_location=self.map_location) + load_state_dict(module, state_dict, strict=False, logger='current') + + if hasattr(module, '_params_init_info'): + update_init_info(module, init_info=self._get_init_info()) + + def _get_init_info(self): + info = f'{self.__class__.__name__}: load from {self.checkpoint}' + return info + + +def update_init_info(module, init_info): + """Update the `_params_init_info` in the module if the value of parameters + are changed. + + Args: + module (obj:`nn.Module`): The module of PyTorch with a user-defined + attribute `_params_init_info` which records the initialization + information. + init_info (str): The string that describes the initialization. + """ + assert hasattr( + module, + '_params_init_info'), f'Can not find `_params_init_info` in {module}' + for name, param in module.named_parameters(): + + assert param in module._params_init_info, ( + f'Find a new :obj:`Parameter` ' + f'named `{name}` during executing the ' + f'`init_weights` of ' + f'`{module.__class__.__name__}`. ' + f'Please do not add or ' + f'replace parameters during executing ' + f'the `init_weights`. ') + + # The parameter has been changed during executing the + # `init_weights` of module + mean_value = param.data.mean().cpu() + if module._params_init_info[param]['tmp_mean_value'] != mean_value: + module._params_init_info[param]['init_info'] = init_info + module._params_init_info[param]['tmp_mean_value'] = mean_value + + +def initialize(module, init_cfg): + r"""Initialize a module. + + Args: + module (``torch.nn.Module``): the module will be initialized. + init_cfg (dict | list[dict]): initialization configuration dict to + define initializer. OpenMMLab has implemented 6 initializers + including ``Constant``, ``Xavier``, ``Normal``, ``Uniform``, + ``Kaiming``, and ``Pretrained``. 
+ + Example: + >>> module = nn.Linear(2, 3, bias=True) + >>> init_cfg = dict(type='Constant', layer='Linear', val =1 , bias =2) + >>> initialize(module, init_cfg) + >>> module = nn.Sequential(nn.Conv1d(3, 1, 3), nn.Linear(1,2)) + >>> # define key ``'layer'`` for initializing layer with different + >>> # configuration + >>> init_cfg = [dict(type='Constant', layer='Conv1d', val=1), + dict(type='Constant', layer='Linear', val=2)] + >>> initialize(module, init_cfg) + >>> # define key``'override'`` to initialize some specific part in + >>> # module + >>> class FooNet(nn.Module): + >>> def __init__(self): + >>> super().__init__() + >>> self.feat = nn.Conv2d(3, 16, 3) + >>> self.reg = nn.Conv2d(16, 10, 3) + >>> self.cls = nn.Conv2d(16, 5, 3) + >>> model = FooNet() + >>> init_cfg = dict(type='Constant', val=1, bias=2, layer='Conv2d', + >>> override=dict(type='Constant', name='reg', val=3, bias=4)) + >>> initialize(model, init_cfg) + >>> model = ResNet(depth=50) + >>> # Initialize weights with the pretrained model. + >>> init_cfg = dict(type='Pretrained', + checkpoint='torchvision://resnet50') + >>> initialize(model, init_cfg) + >>> # Initialize weights of a sub-module with the specific part of + >>> # a pretrained model by using "prefix". + >>> url = 'http://download.openmmlab.com/mmdetection/v2.0/retinanet/'\ + >>> 'retinanet_r50_fpn_1x_coco/'\ + >>> 'retinanet_r50_fpn_1x_coco_20200130-c2398f9e.pth' + >>> init_cfg = dict(type='Pretrained', + checkpoint=url, prefix='backbone.') + """ + if not isinstance(init_cfg, (dict, list)): + raise TypeError(f'init_cfg must be a dict or a list of dict, \ + but got {type(init_cfg)}') + + if isinstance(init_cfg, dict): + init_cfg = [init_cfg] + + for cfg in init_cfg: + # should deeply copy the original config because cfg may be used by + # other modules, e.g., one init_cfg shared by multiple bottleneck + # blocks, the expected cfg will be changed after pop and will change + # the initialization behavior of other modules + cp_cfg = copy.deepcopy(cfg) + override = cp_cfg.pop('override', None) + _initialize(module, cp_cfg) + + if override is not None: + cp_cfg.pop('layer', None) + _initialize_override(module, override, cp_cfg) + else: + # All attributes in module have same initialization. + pass + + +def _initialize(module, cfg, wholemodule=False): + func = build_from_cfg(cfg, WEIGHT_INITIALIZERS) + # wholemodule flag is for override mode, there is no layer key in override + # and initializer will give init values for the whole module with the name + # in override. 
+ func.wholemodule = wholemodule + func(module) + + +def _initialize_override(module, override, cfg): + if not isinstance(override, (dict, list)): + raise TypeError(f'override must be a dict or a list of dict, \ + but got {type(override)}') + + override = [override] if isinstance(override, dict) else override + + for override_ in override: + + cp_override = copy.deepcopy(override_) + name = cp_override.pop('name', None) + if name is None: + raise ValueError('`override` must contain the key "name",' + f'but got {cp_override}') + # if override only has name key, it means use args in init_cfg + if not cp_override: + cp_override.update(cfg) + # if override has name key and other args except type key, it will + # raise error + elif 'type' not in cp_override.keys(): + raise ValueError( + f'`override` need "type" key, but got {cp_override}') + + if hasattr(module, name): + _initialize(getattr(module, name), cp_override, wholemodule=True) + else: + raise RuntimeError(f'module did not have attribute {name}, ' + f'but init_cfg is {cp_override}.') + + +def build_from_cfg( + cfg: Union[dict, ConfigDict, Config], + registry: Registry, + default_args: Optional[Union[dict, ConfigDict, Config]] = None) -> Any: + """Build a module from config dict when it is a class configuration, or + call a function from config dict when it is a function configuration. + + If the global variable default scope (:obj:`DefaultScope`) exists, + :meth:`build` will firstly get the responding registry and then call + its own :meth:`build`. + + At least one of the ``cfg`` and ``default_args`` contains the key "type", + which should be either str or class. If they all contain it, the key + in ``cfg`` will be used because ``cfg`` has a high priority than + ``default_args`` that means if a key exists in both of them, the value of + the key will be ``cfg[key]``. They will be merged first and the key "type" + will be popped up and the remaining keys will be used as initialization + arguments. + + Examples: + >>> from mmengine import Registry, build_from_cfg + >>> MODELS = Registry('models') + >>> @MODELS.register_module() + >>> class ResNet: + >>> def __init__(self, depth, stages=4): + >>> self.depth = depth + >>> self.stages = stages + >>> cfg = dict(type='ResNet', depth=50) + >>> model = build_from_cfg(cfg, MODELS) + >>> # Returns an instantiated object + >>> @MODELS.register_module() + >>> def resnet50(): + >>> pass + >>> resnet = build_from_cfg(dict(type='resnet50'), MODELS) + >>> # Return a result of the calling function + + Args: + cfg (dict or ConfigDict or Config): Config dict. It should at least + contain the key "type". + registry (:obj:`Registry`): The registry to search the type from. + default_args (dict or ConfigDict or Config, optional): Default + initialization arguments. Defaults to None. + + Returns: + object: The constructed object. 
+ """ + if not isinstance(cfg, (dict, ConfigDict, Config)): + raise TypeError( + f'cfg should be a dict, ConfigDict or Config, but got {type(cfg)}') + + if 'type' not in cfg: + if default_args is None or 'type' not in default_args: + raise KeyError( + '`cfg` or `default_args` must contain the key "type", ' + f'but got {cfg}\n{default_args}') + + if not isinstance(registry, Registry): + raise TypeError('registry must be a mmengine.Registry object, ' + f'but got {type(registry)}') + + if not (isinstance(default_args, + (dict, ConfigDict, Config)) or default_args is None): + raise TypeError( + 'default_args should be a dict, ConfigDict, Config or None, ' + f'but got {type(default_args)}') + + args = cfg.copy() + if default_args is not None: + for name, value in default_args.items(): + args.setdefault(name, value) + + # Instance should be built under target scope, if `_scope_` is defined + # in cfg, current default scope should switch to specified scope + # temporarily. + scope = args.pop('_scope_', None) + with registry.switch_scope_and_registry(scope) as registry: + obj_type = args.pop('type') + if isinstance(obj_type, str): + obj_cls = registry.get(obj_type) + if obj_cls is None: + raise KeyError( + f'{obj_type} is not in the {registry.scope}::{registry.name} registry. ' # noqa: E501 + f'Please check whether the value of `{obj_type}` is ' + 'correct or it was registered as expected. More details ' + 'can be found at ' + 'https://mmengine.readthedocs.io/en/latest/advanced_tutorials/config.html#import-the-custom-module' # noqa: E501 + ) + # this will include classes, functions, partial functions and more + elif callable(obj_type): + obj_cls = obj_type + else: + raise TypeError( + f'type must be a str or valid type, but got {type(obj_type)}') + + # If `obj_cls` inherits from `ManagerMixin`, it should be + # instantiated by `ManagerMixin.get_instance` to ensure that it + # can be accessed globally. 
+ if inspect.isclass(obj_cls) and \ + issubclass(obj_cls, ManagerMixin): # type: ignore + obj = obj_cls.get_instance(**args) # type: ignore + else: + obj = obj_cls(**args) # type: ignore + return obj + + +def constant_init(module, val, bias=0): + if hasattr(module, 'weight') and module.weight is not None: + nn.init.constant_(module.weight, val) + if hasattr(module, 'bias') and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def normal_init(module, mean=0, std=1, bias=0): + if hasattr(module, 'weight') and module.weight is not None: + nn.init.normal_(module.weight, mean, std) + if hasattr(module, 'bias') and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def trunc_normal_init(module: nn.Module, + mean: float = 0, + std: float = 1, + a: float = -2, + b: float = 2, + bias: float = 0) -> None: + if hasattr(module, 'weight') and module.weight is not None: + trunc_normal_(module.weight, mean, std, a, b) # type: ignore + if hasattr(module, 'bias') and module.bias is not None: + nn.init.constant_(module.bias, bias) # type: ignore + + +def kaiming_init(module, + a=0, + mode='fan_out', + nonlinearity='relu', + bias=0, + distribution='normal'): + assert distribution in ['uniform', 'normal'] + if hasattr(module, 'weight') and module.weight is not None: + if distribution == 'uniform': + nn.init.kaiming_uniform_( + module.weight, a=a, mode=mode, nonlinearity=nonlinearity) + else: + nn.init.kaiming_normal_( + module.weight, a=a, mode=mode, nonlinearity=nonlinearity) + if hasattr(module, 'bias') and module.bias is not None: + nn.init.constant_(module.bias, bias) + + +def trunc_normal_(tensor: Tensor, + mean: float = 0., + std: float = 1., + a: float = -2., + b: float = 2.) -> Tensor: + r"""Fills the input Tensor with values drawn from a truncated normal + distribution. The values are effectively drawn from the normal distribution + :math:`\mathcal{N}(\text{mean}, \text{std}^2)` with values outside + :math:`[a, b]` redrawn until they are within the bounds. The method used + for generating the random values works best when :math:`a \leq \text{mean} + \leq b`. + + Modified from + https://github.com/pytorch/pytorch/blob/master/torch/nn/init.py + + Args: + tensor (``torch.Tensor``): an n-dimensional `torch.Tensor`. + mean (float): the mean of the normal distribution. + std (float): the standard deviation of the normal distribution. + a (float): the minimum cutoff value. + b (float): the maximum cutoff value. + """ + return _no_grad_trunc_normal_(tensor, mean, std, a, b) + + +def _no_grad_trunc_normal_(tensor: Tensor, mean: float, std: float, a: float, + b: float) -> Tensor: + # Method based on + # https://people.sc.fsu.edu/~jburkardt/presentations/truncated_normal.pdf + # Modified from + # https://github.com/pytorch/pytorch/blob/master/torch/nn/init.py + def norm_cdf(x): + # Computes standard normal cumulative distribution function + return (1. + math.erf(x / math.sqrt(2.))) / 2. + + if (mean < a - 2 * std) or (mean > b + 2 * std): + warnings.warn( + 'mean is more than 2 std from [a, b] in nn.init.trunc_normal_. ' + 'The distribution of values may be incorrect.', + stacklevel=2) + + with torch.no_grad(): + # Values are generated by using a truncated uniform distribution and + # then using the inverse CDF for the normal distribution. + # Get upper and lower cdf values + lower = norm_cdf((a - mean) / std) + upper = norm_cdf((b - mean) / std) + + # Uniformly fill tensor with values from [lower, upper], then translate + # to [2lower-1, 2upper-1]. 
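        # erfinv expects arguments in (-1, 1): since Phi(x) = (1 + erf(x / sqrt(2))) / 2,
        # the normal quantile is Phi^{-1}(u) = sqrt(2) * erfinv(2u - 1), which is why
        # the uniform samples are drawn directly on [2*lower - 1, 2*upper - 1].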
+ tensor.uniform_(2 * lower - 1, 2 * upper - 1) + + # Use inverse cdf transform for normal distribution to get truncated + # standard normal + tensor.erfinv_() + + # Transform to proper mean, std + tensor.mul_(std * math.sqrt(2.)) + tensor.add_(mean) + + # Clamp to ensure it's in the proper range + tensor.clamp_(min=a, max=b) + return tensor \ No newline at end of file diff --git a/segformer_plusplus/random_benchmark.py b/segformer_plusplus/random_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..8cb1628279a081dd2c64635faea0d154d458ddd9 --- /dev/null +++ b/segformer_plusplus/random_benchmark.py @@ -0,0 +1,61 @@ +from typing import Union, List, Tuple + +import numpy as np +import torch + +from .utils import benchmark + +device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu') + + +def random_benchmark( + model: torch.nn.Module, + batch_size: Union[int, List[int]] = 1, + image_size: Union[Tuple[int], List[Tuple[int]]] = (3, 1024, 1024), +): + """ + Calculate the FPS of a given model using randomly generated tensors. + + Args: + model: instance of a model (e.g. SegFormer) + batch_size: the batch size(s) at which to calculate the FPS (e.g. 1 or [1, 2, 4]) + image_size: the size of the images to use (e.g. (3, 1024, 1024)) + + Returns: the FPS values calculated for all image sizes and batch sizes in the form of a dictionary + + """ + if isinstance(batch_size, int): + batch_size = [batch_size] + if isinstance(image_size, tuple): + image_size = [image_size] + + values = {} + throughput_values = [] + + for i in image_size: + # fill with fps for each batch size + fps = [] + for b in batch_size: + for _ in range(4): + # Baseline benchmark + if i[1] >= 1024: + r = 16 + else: + r = 32 + baseline_throughput = benchmark( + model.to(device), + device=device, + verbose=True, + runs=r, + batch_size=b, + input_size=i + ) + throughput_values.append(baseline_throughput) + throughput_values = np.asarray(throughput_values) + throughput = np.around(np.mean(throughput_values), decimals=2) + print('Im_size:', i, 'Batch_size:', b, 'Mean:', throughput, 'Std:', + np.around(np.std(throughput_values), decimals=2)) + throughput_values = [] + fps.append({b: throughput}) + values[i] = fps + return values diff --git a/segformer_plusplus/start_cityscape_benchmark.py b/segformer_plusplus/start_cityscape_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..f140c50cf2329c3bb97c6ab5a7f200feb4c945d9 --- /dev/null +++ b/segformer_plusplus/start_cityscape_benchmark.py @@ -0,0 +1,53 @@ +import os +import torch +import numpy as np +import argparse + +from .build_model import create_model +from .cityscape_benchmark import cityscape_benchmark + +parser = argparse.ArgumentParser(description="Segformer Benchmarking Script") +parser.add_argument('--backbone', type=str, default='b0', choices=['b0', 'b1', 'b2', 'b3', 'b4', 'b5'], help='Model backbone version') +parser.add_argument('--head', type=str, default='bsm_hq', choices=['bsm_hq', 'bsm_fast', 'n2d_2x2'], help='Model head type') +parser.add_argument('--checkpoint', type=str, default=None, help='Path to .pth checkpoint file (optional)') +args = parser.parse_args() + +model = create_model(args.backbone, args.head, pretrained=True) + +if args.checkpoint: + checkpoint_path = os.path.expanduser(args.checkpoint) + print(f"Loading checkpoint: {checkpoint_path}") + checkpoint = torch.load(checkpoint_path) + model.load_state_dict(checkpoint) +else: + print("No checkpoint provided – using model as initialized.") + 
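# Note: torch.load() above restores tensors to the device they were saved on;
# on a CPU-only machine, a GPU-trained checkpoint would need
# torch.load(checkpoint_path, map_location='cpu') instead.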
+image_path = os.path.expanduser('~/SegformerPlusPlus/mmsegmentation/data/cityscapes/leftImg8bit/test/berlin/berlin_000543_000019_leftImg8bit.png') +result = cityscape_benchmark(model, image_path) + +print("Cityscapes Benchmark Results:", result) + +reference_txt_path = os.path.expanduser('~/SegformerPlusPlus/model/cityscapes_prediction_output_reference.txt') +generated_txt_path = os.path.expanduser('~/SegformerPlusPlus/model/cityscapes_prediction_output.txt') + +if os.path.exists(reference_txt_path) and os.path.exists(generated_txt_path): + ref_arr = np.loadtxt(reference_txt_path, dtype=int) + gen_arr = np.loadtxt(generated_txt_path, dtype=int) + + if ref_arr.shape != gen_arr.shape: + print(f"Files have different shapes: {ref_arr.shape} vs. {gen_arr.shape}") + else: + total_elements = ref_arr.size + equal_elements = np.sum(ref_arr == gen_arr) + similarity = equal_elements / total_elements + + threshold = 0.999 + if similarity >= threshold: + print(f"Outputs are {similarity*100:.4f}% identical (>= {threshold*100}%).") + else: + print(f"Outputs differ by {100 - similarity*100:.4f}%.") +else: + if not os.path.exists(reference_txt_path): + print(f"Reference file not found: {reference_txt_path}") + if not os.path.exists(generated_txt_path): + print(f"Generated output file not found: {generated_txt_path}") diff --git a/segformer_plusplus/start_random_benchmark.py b/segformer_plusplus/start_random_benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..fec133ddef4836e4d2549849adc95e8ffb0aabf3 --- /dev/null +++ b/segformer_plusplus/start_random_benchmark.py @@ -0,0 +1,5 @@ +from .build_model import create_model +from .random_benchmark import random_benchmark + +model = create_model('b5', 'bsm_hq', pretrained=True) +v = random_benchmark(model) \ No newline at end of file diff --git a/segformer_plusplus/utils/__init__.py b/segformer_plusplus/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a16c7795ef0e3cddd17a38475e9c2a1601c7984f --- /dev/null +++ b/segformer_plusplus/utils/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
+from .embed import PatchEmbed +from .shape_convert import nchw_to_nlc, nlc_to_nchw +from .wrappers import resize, Conv2d +from .tome_presets import tome_presets +from .registry import MODELS +from .imagenet_weights import imagenet_weights +from .benchmark import benchmark +from .activation import build_activation_layer, build_norm_layer, build_dropout +from .version_utils import digit_version + + +__all__ = [ + 'PatchEmbed', 'nchw_to_nlc', 'nlc_to_nchw', 'resize', 'Conv2d', 'tome_presets', 'MODELS', 'imagenet_weights', 'benchmark', 'build_activation_layer', 'build_norm_layer', 'build_dropout', 'digit_version' +] diff --git a/segformer_plusplus/utils/activation.py b/segformer_plusplus/utils/activation.py new file mode 100644 index 0000000000000000000000000000000000000000..43f037b1f3ed0a02184d470ffe1b02c4ad89a121 --- /dev/null +++ b/segformer_plusplus/utils/activation.py @@ -0,0 +1,430 @@ +import warnings +from typing import Dict, Tuple, Union, Any, Optional +import inspect +import torch.nn as nn +import torch +from torch.nn.modules.batchnorm import _BatchNorm +from torch.nn.modules.instancenorm import _InstanceNorm + +from ..model.weight_init import constant_init, kaiming_init +from ..utils.registry import MODELS +from .build_functions import SyncBatchNorm + + +TORCH_VERSION = torch.__version__ + +# register norm-layers +MODELS.register_module('BN', module=nn.BatchNorm2d) +MODELS.register_module('BN1d', module=nn.BatchNorm1d) +MODELS.register_module('BN2d', module=nn.BatchNorm2d) +MODELS.register_module('BN3d', module=nn.BatchNorm3d) +MODELS.register_module('SyncBN', module=SyncBatchNorm) +MODELS.register_module('GN', module=nn.GroupNorm) +MODELS.register_module('LN', module=nn.LayerNorm) +MODELS.register_module('IN', module=nn.InstanceNorm2d) +MODELS.register_module('IN1d', module=nn.InstanceNorm1d) +MODELS.register_module('IN2d', module=nn.InstanceNorm2d) +MODELS.register_module('IN3d', module=nn.InstanceNorm3d) +# register conv-layers +MODELS.register_module('Conv1d', module=nn.Conv1d) +MODELS.register_module('Conv2d', module=nn.Conv2d) +MODELS.register_module('Conv3d', module=nn.Conv3d) +MODELS.register_module('Conv', module=nn.Conv2d) +# register activation-functions +MODELS.register_module('GELU', module=nn.GELU) +MODELS.register_module('ReLU', module=nn.ReLU) + +def build_activation_layer(cfg: Dict) -> nn.Module: + """Build activation layer. + + Args: + cfg (dict): The activation layer config, which should contain: + + - type (str): Layer type. + - layer args: Args needed to instantiate an activation layer. + + Returns: + nn.Module: Created activation layer. + """ + return MODELS.build(cfg) + + +def build_norm_layer(cfg: Dict, + num_features: int, + postfix: Union[int, str] = '') -> Tuple[str, nn.Module]: + """Build normalization layer. + + Args: + cfg (dict): The norm layer config, which should contain: + + - type (str): Layer type. + - layer args: Args needed to instantiate a norm layer. + - requires_grad (bool, optional): Whether stop gradient updates. + num_features (int): Number of input channels. + postfix (int | str): The postfix to be appended into norm abbreviation + to create named layer. + + Returns: + tuple[str, nn.Module]: The first element is the layer name consisting + of abbreviation and postfix, e.g., bn1, gn. The second element is the + created norm layer. 
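    Example (illustrative, using the layers registered above):
        >>> name, layer = build_norm_layer(dict(type='BN', requires_grad=True), 64)
        >>> name
        'bn'
        >>> isinstance(layer, nn.BatchNorm2d)
        True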
+ """ + if not isinstance(cfg, dict): + raise TypeError('cfg must be a dict') + if 'type' not in cfg: + raise KeyError('the cfg dict must contain the key "type"') + cfg_ = cfg.copy() + + layer_type = cfg_.pop('type') + + # Switch registry to the target scope. If `norm_layer` cannot be found + # in the registry, fallback to search `norm_layer` in the + # mmengine.MODELS. + with MODELS.switch_scope_and_registry(None) as registry: + norm_layer = registry.get(layer_type) + if norm_layer is None: + raise KeyError(f'Cannot find {norm_layer} in registry under scope ' + f'name {registry.scope}') + abbr = infer_abbr(norm_layer) + + assert isinstance(postfix, (int, str)) + name = abbr + str(postfix) + + requires_grad = cfg_.pop('requires_grad', True) + cfg_.setdefault('eps', 1e-5) + if layer_type != 'GN': + layer = norm_layer(num_features, **cfg_) + if layer_type == 'SyncBN' and hasattr(layer, '_specify_ddp_gpu_num'): + layer._specify_ddp_gpu_num(1) + else: + assert 'num_groups' in cfg_ + layer = norm_layer(num_channels=num_features, **cfg_) + + for param in layer.parameters(): + param.requires_grad = requires_grad + + return name, layer + + +def infer_abbr(class_type): + """Infer abbreviation from the class name. + + When we build a norm layer with `build_norm_layer()`, we want to preserve + the norm type in variable names, e.g, self.bn1, self.gn. This method will + infer the abbreviation to map class types to abbreviations. + + Rule 1: If the class has the property "_abbr_", return the property. + Rule 2: If the parent class is _BatchNorm, GroupNorm, LayerNorm or + InstanceNorm, the abbreviation of this layer will be "bn", "gn", "ln" and + "in" respectively. + Rule 3: If the class name contains "batch", "group", "layer" or "instance", + the abbreviation of this layer will be "bn", "gn", "ln" and "in" + respectively. + Rule 4: Otherwise, the abbreviation falls back to "norm". + + Args: + class_type (type): The norm layer type. + + Returns: + str: The inferred abbreviation. + """ + if not inspect.isclass(class_type): + raise TypeError( + f'class_type must be a type, but got {type(class_type)}') + if hasattr(class_type, '_abbr_'): + return class_type._abbr_ + if issubclass(class_type, _InstanceNorm): # IN is a subclass of BN + return 'in' + elif issubclass(class_type, _BatchNorm): + return 'bn' + elif issubclass(class_type, nn.GroupNorm): + return 'gn' + elif issubclass(class_type, nn.LayerNorm): + return 'ln' + else: + class_name = class_type.__name__.lower() + if 'batch' in class_name: + return 'bn' + elif 'group' in class_name: + return 'gn' + elif 'layer' in class_name: + return 'ln' + elif 'instance' in class_name: + return 'in' + else: + return 'norm_layer' + + +def build_dropout(cfg: Dict, default_args: Optional[Dict] = None) -> Any: + """Builder for drop out layers.""" + return MODELS.build(cfg, default_args=default_args) + + +def build_conv_layer(cfg: Optional[Dict], *args, **kwargs) -> nn.Module: + """Build convolution layer. + + Args: + cfg (None or dict): The conv layer config, which should contain: + - type (str): Layer type. + - layer args: Args needed to instantiate an conv layer. + args (argument list): Arguments passed to the `__init__` + method of the corresponding conv layer. + kwargs (keyword arguments): Keyword arguments passed to the `__init__` + method of the corresponding conv layer. + + Returns: + nn.Module: Created conv layer. 
+ """ + if cfg is None: + cfg_ = dict(type='Conv2d') + else: + if not isinstance(cfg, dict): + raise TypeError('cfg must be a dict') + if 'type' not in cfg: + raise KeyError('the cfg dict must contain the key "type"') + cfg_ = cfg.copy() + + layer_type = cfg_.pop('type') + + # Switch registry to the target scope. If `conv_layer` cannot be found + # in the registry, fallback to search `conv_layer` in the + # mmengine.MODELS. + with MODELS.switch_scope_and_registry(None) as registry: + conv_layer = registry.get(layer_type) + if conv_layer is None: + raise KeyError(f'Cannot find {conv_layer} in registry under scope ' + f'name {registry.scope}') + layer = conv_layer(*args, **kwargs, **cfg_) + + return layer + + +def build_padding_layer(cfg: Dict, *args, **kwargs) -> nn.Module: + """Build padding layer. + + Args: + cfg (dict): The padding layer config, which should contain: + - type (str): Layer type. + - layer args: Args needed to instantiate a padding layer. + + Returns: + nn.Module: Created padding layer. + """ + if not isinstance(cfg, dict): + raise TypeError('cfg must be a dict') + if 'type' not in cfg: + raise KeyError('the cfg dict must contain the key "type"') + + cfg_ = cfg.copy() + padding_type = cfg_.pop('type') + + # Switch registry to the target scope. If `padding_layer` cannot be found + # in the registry, fallback to search `padding_layer` in the + # mmengine.MODELS. + with MODELS.switch_scope_and_registry(None) as registry: + padding_layer = registry.get(padding_type) + if padding_layer is None: + raise KeyError(f'Cannot find {padding_layer} in registry under scope ' + f'name {registry.scope}') + layer = padding_layer(*args, **kwargs, **cfg_) + + return layer + + +@MODELS.register_module() +class ConvModule(nn.Module): + """A conv block that bundles conv/norm/activation layers. + + This block simplifies the usage of convolution layers, which are commonly + used with a norm layer (e.g., BatchNorm) and activation layer (e.g., ReLU). + It is based upon three build methods: `build_conv_layer()`, + `build_norm_layer()` and `build_activation_layer()`. + + Besides, we add some additional features in this module. + 1. Automatically set `bias` of the conv layer. + 2. Spectral norm is supported. + 3. More padding modes are supported. Before PyTorch 1.5, nn.Conv2d only + supports zero and circular padding, and we add "reflect" padding mode. + + Args: + in_channels (int): Number of channels in the input feature map. + Same as that in ``nn._ConvNd``. + out_channels (int): Number of channels produced by the convolution. + Same as that in ``nn._ConvNd``. + kernel_size (int | tuple[int]): Size of the convolving kernel. + Same as that in ``nn._ConvNd``. + stride (int | tuple[int]): Stride of the convolution. + Same as that in ``nn._ConvNd``. + padding (int | tuple[int]): Zero-padding added to both sides of + the input. Same as that in ``nn._ConvNd``. + dilation (int | tuple[int]): Spacing between kernel elements. + Same as that in ``nn._ConvNd``. + groups (int): Number of blocked connections from input channels to + output channels. Same as that in ``nn._ConvNd``. + bias (bool | str): If specified as `auto`, it will be decided by the + norm_cfg. Bias will be set as True if `norm_cfg` is None, otherwise + False. Default: "auto". + conv_cfg (dict): Config dict for convolution layer. Default: None, + which means using conv2d. + norm_cfg (dict): Config dict for normalization layer. Default: None. + act_cfg (dict): Config dict for activation layer. + Default: dict(type='ReLU'). 
+ inplace (bool): Whether to use inplace mode for activation. + Default: True. + with_spectral_norm (bool): Whether use spectral norm in conv module. + Default: False. + padding_mode (str): If the `padding_mode` has not been supported by + current `Conv2d` in PyTorch, we will use our own padding layer + instead. Currently, we support ['zeros', 'circular'] with official + implementation and ['reflect'] with our own implementation. + Default: 'zeros'. + order (tuple[str]): The order of conv/norm/activation layers. It is a + sequence of "conv", "norm" and "act". Common examples are + ("conv", "norm", "act") and ("act", "conv", "norm"). + Default: ('conv', 'norm', 'act'). + """ + + _abbr_ = 'conv_block' + + def __init__(self, + in_channels: int, + out_channels: int, + kernel_size: Union[int, Tuple[int, int]], + stride: Union[int, Tuple[int, int]] = 1, + padding: Union[int, Tuple[int, int]] = 0, + dilation: Union[int, Tuple[int, int]] = 1, + groups: int = 1, + bias: Union[bool, str] = 'auto', + conv_cfg: Optional[Dict] = None, + norm_cfg: Optional[Dict] = None, + act_cfg: Optional[Dict] = dict(type='ReLU'), + inplace: bool = True, + with_spectral_norm: bool = False, + padding_mode: str = 'zeros', + order: tuple = ('conv', 'norm', 'act')): + super().__init__() + assert conv_cfg is None or isinstance(conv_cfg, dict) + assert norm_cfg is None or isinstance(norm_cfg, dict) + assert act_cfg is None or isinstance(act_cfg, dict) + official_padding_mode = ['zeros', 'circular'] + self.conv_cfg = conv_cfg + self.norm_cfg = norm_cfg + self.act_cfg = act_cfg + self.inplace = inplace + self.with_spectral_norm = with_spectral_norm + self.with_explicit_padding = padding_mode not in official_padding_mode + self.order = order + assert isinstance(self.order, tuple) and len(self.order) == 3 + assert set(order) == {'conv', 'norm', 'act'} + + self.with_norm = norm_cfg is not None + self.with_activation = act_cfg is not None + # if the conv layer is before a norm layer, bias is unnecessary. 
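        # e.g. bias='auto' resolves to False when norm_cfg=dict(type='BN') is
        # given, and to True when norm_cfg is None.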
+ if bias == 'auto': + bias = not self.with_norm + self.with_bias = bias + + if self.with_explicit_padding: + pad_cfg = dict(type=padding_mode) + self.padding_layer = build_padding_layer(pad_cfg, padding) + + # reset padding to 0 for conv module + conv_padding = 0 if self.with_explicit_padding else padding + # build convolution layer + self.conv = build_conv_layer( + conv_cfg, + in_channels, + out_channels, + kernel_size, + stride=stride, + padding=conv_padding, + dilation=dilation, + groups=groups, + bias=bias) + # export the attributes of self.conv to a higher level for convenience + self.in_channels = self.conv.in_channels + self.out_channels = self.conv.out_channels + self.kernel_size = self.conv.kernel_size + self.stride = self.conv.stride + self.padding = padding + self.dilation = self.conv.dilation + self.transposed = self.conv.transposed + self.output_padding = self.conv.output_padding + self.groups = self.conv.groups + + if self.with_spectral_norm: + self.conv = nn.utils.spectral_norm(self.conv) + + # build normalization layers + if self.with_norm: + # norm layer is after conv layer + if order.index('norm') > order.index('conv'): + norm_channels = out_channels + else: + norm_channels = in_channels + self.norm_name, norm = build_norm_layer( + norm_cfg, norm_channels) # type: ignore + self.add_module(self.norm_name, norm) + if self.with_bias: + if isinstance(norm, (_BatchNorm, _InstanceNorm)): + warnings.warn( + 'Unnecessary conv bias before batch/instance norm') + else: + self.norm_name = None # type: ignore + + # build activation layer + if self.with_activation: + act_cfg_ = act_cfg.copy() # type: ignore + # nn.Tanh has no 'inplace' argument + if act_cfg_['type'] not in [ + 'Tanh', 'PReLU', 'Sigmoid', 'HSigmoid', 'Swish', 'GELU' + ]: + act_cfg_.setdefault('inplace', inplace) + self.activate = build_activation_layer(act_cfg_) + + # Use msra init by default + self.init_weights() + + @property + def norm(self): + if self.norm_name: + return getattr(self, self.norm_name) + else: + return None + + def init_weights(self): + # 1. It is mainly for customized conv layers with their own + # initialization manners by calling their own ``init_weights()``, + # and we do not want ConvModule to override the initialization. + # 2. For customized conv layers without their own initialization + # manners (that is, they don't have their own ``init_weights()``) + # and PyTorch's conv layers, they will be initialized by + # this method with default ``kaiming_init``. + # Note: For PyTorch's conv layers, they will be overwritten by our + # initialization implementation using default ``kaiming_init``. 
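        # In practice: Kaiming init for the conv weight (matched to the
        # activation's nonlinearity) and, if a norm layer is present,
        # weight=1 / bias=0 for that norm layer via ``constant_init``.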
+ if not hasattr(self.conv, 'init_weights'): + if self.with_activation and self.act_cfg['type'] == 'LeakyReLU': + nonlinearity = 'leaky_relu' + a = self.act_cfg.get('negative_slope', 0.01) + else: + nonlinearity = 'relu' + a = 0 + kaiming_init(self.conv, a=a, nonlinearity=nonlinearity) + if self.with_norm: + constant_init(self.norm, 1, bias=0) + + def forward(self, + x: torch.Tensor, + activate: bool = True, + norm: bool = True) -> torch.Tensor: + for layer in self.order: + if layer == 'conv': + if self.with_explicit_padding: + x = self.padding_layer(x) + x = self.conv(x) + elif layer == 'norm' and norm and self.with_norm: + x = self.norm(x) + elif layer == 'act' and activate and self.with_activation: + x = self.activate(x) + return x + diff --git a/segformer_plusplus/utils/benchmark.py b/segformer_plusplus/utils/benchmark.py new file mode 100644 index 0000000000000000000000000000000000000000..7c0b20c51f7c83d33e40d5210da9f1d4bbabb87d --- /dev/null +++ b/segformer_plusplus/utils/benchmark.py @@ -0,0 +1,76 @@ +# Copyright (c) Meta Platforms, Inc. and affiliates. +# All rights reserved. + +# Source: https://github.com/facebookresearch/ToMe/blob/main/tome/utils.py +# -------------------------------------------------------- + +import time +from typing import Tuple +import torch +from tqdm import tqdm + + +def benchmark( + model: torch.nn.Module, + device: torch.device = 0, + input_size: Tuple[int] = (3, 224, 224), + batch_size: int = 64, + runs: int = 40, + throw_out: float = 0.25, + use_fp16: bool = False, + verbose: bool = False, +) -> float: + """ + Benchmark the given model with random inputs at the given batch size. + + Args: + - model: the module to benchmark + - device: the device to use for benchmarking + - input_size: the input size to pass to the model (channels, h, w) + - batch_size: the batch size to use for evaluation + - runs: the number of total runs to do + - throw_out: the percentage of runs to throw out at the start of testing + - use_fp16: whether or not to benchmark with float16 and autocast + - verbose: whether or not to use tqdm to print progress / print throughput at end + + Returns: + - the throughput measured in images / second + """ + + if not isinstance(device, torch.device): + device = torch.device(device) + is_cuda = torch.device(device).type == "cuda" + + model = model.eval().to(device) + input = torch.rand(batch_size, *input_size, device=device) + if use_fp16: + input = input.half() + + warm_up = int(runs * throw_out) + total = 0 + start = time.time() + + with torch.autocast(device.type, enabled=use_fp16): + with torch.no_grad(): + for i in tqdm(range(runs), disable=not verbose, desc="Benchmarking"): + if i == warm_up: + if is_cuda: + torch.cuda.synchronize() + total = 0 + start = time.time() + + model(input) + total += batch_size + + if is_cuda: + torch.cuda.synchronize() + + end = time.time() + elapsed = end - start + + throughput = total / elapsed + + if verbose: + print(f"Throughput: {throughput:.2f} im/s") + + return throughput diff --git a/segformer_plusplus/utils/build_functions.py b/segformer_plusplus/utils/build_functions.py new file mode 100644 index 0000000000000000000000000000000000000000..0f5f92a42c55f5ba927e3c9be01fd88d495a5695 --- /dev/null +++ b/segformer_plusplus/utils/build_functions.py @@ -0,0 +1,156 @@ +from typing import Any, Optional, Union +import inspect +import torch.nn as nn +import torch + +from ..configs.config.config import Config, ConfigDict +from .registry import Registry +from ..utils.manager import ManagerMixin + + +TORCH_VERSION = 
torch.__version__ + +def build_from_cfg( + cfg: Union[dict, ConfigDict, Config], + registry: Registry, + default_args: Optional[Union[dict, ConfigDict, Config]] = None) -> Any: + """Build a module from config dict when it is a class configuration, or + call a function from config dict when it is a function configuration. + + If the global variable default scope (:obj:`DefaultScope`) exists, + :meth:`build` will firstly get the responding registry and then call + its own :meth:`build`. + + At least one of the ``cfg`` and ``default_args`` contains the key "type", + which should be either str or class. If they all contain it, the key + in ``cfg`` will be used because ``cfg`` has a high priority than + ``default_args`` that means if a key exists in both of them, the value of + the key will be ``cfg[key]``. They will be merged first and the key "type" + will be popped up and the remaining keys will be used as initialization + arguments. + + Examples: + >>> from mmengine import Registry, build_from_cfg + >>> MODELS = Registry('models') + >>> @MODELS.register_module() + >>> class ResNet: + >>> def __init__(self, depth, stages=4): + >>> self.depth = depth + >>> self.stages = stages + >>> cfg = dict(type='ResNet', depth=50) + >>> model = build_from_cfg(cfg, MODELS) + >>> # Returns an instantiated object + >>> @MODELS.register_module() + >>> def resnet50(): + >>> pass + >>> resnet = build_from_cfg(dict(type='resnet50'), MODELS) + >>> # Return a result of the calling function + + Args: + cfg (dict or ConfigDict or Config): Config dict. It should at least + contain the key "type". + registry (:obj:`Registry`): The registry to search the type from. + default_args (dict or ConfigDict or Config, optional): Default + initialization arguments. Defaults to None. + + Returns: + object: The constructed object. + """ + + if not isinstance(cfg, (dict, ConfigDict, Config)): + raise TypeError( + f'cfg should be a dict, ConfigDict or Config, but got {type(cfg)}') + + if 'type' not in cfg: + if default_args is None or 'type' not in default_args: + raise KeyError( + '`cfg` or `default_args` must contain the key "type", ' + f'but got {cfg}\n{default_args}') + + if not isinstance(registry, Registry): + raise TypeError('registry must be a mmengine.Registry object, ' + f'but got {type(registry)}') + + if not (isinstance(default_args, + (dict, ConfigDict, Config)) or default_args is None): + raise TypeError( + 'default_args should be a dict, ConfigDict, Config or None, ' + f'but got {type(default_args)}') + + args = cfg.copy() + if default_args is not None: + for name, value in default_args.items(): + args.setdefault(name, value) + + # Instance should be built under target scope, if `_scope_` is defined + # in cfg, current default scope should switch to specified scope + # temporarily. + scope = args.pop('_scope_', None) + with registry.switch_scope_and_registry(scope) as registry: + obj_type = args.pop('type') + if isinstance(obj_type, str): + obj_cls = registry.get(obj_type) + if obj_cls is None: + raise KeyError( + f'{obj_type} is not in the {registry.scope}::{registry.name} registry. ' # noqa: E501 + f'Please check whether the value of `{obj_type}` is ' + 'correct or it was registered as expected. 
More details ' + 'can be found at ' + 'https://mmengine.readthedocs.io/en/latest/advanced_tutorials/config.html#import-the-custom-module' # noqa: E501 + ) + # this will include classes, functions, partial functions and more + elif callable(obj_type): + obj_cls = obj_type + else: + raise TypeError( + f'type must be a str or valid type, but got {type(obj_type)}') + + # If `obj_cls` inherits from `ManagerMixin`, it should be + # instantiated by `ManagerMixin.get_instance` to ensure that it + # can be accessed globally. + if inspect.isclass(obj_cls) and \ + issubclass(obj_cls, ManagerMixin): # type: ignore + obj = obj_cls.get_instance(**args) # type: ignore + else: + obj = obj_cls(**args) # type: ignore + return obj + + +def build_model_from_cfg( + cfg: Union[dict, ConfigDict, Config], + registry: Registry, + default_args: Optional[Union[dict, 'ConfigDict', 'Config']] = None +) -> 'nn.Module': + """Build a PyTorch model from config dict(s). Different from + ``build_from_cfg``, if cfg is a list, a ``nn.Sequential`` will be built. + + Args: + cfg (dict, list[dict]): The config of modules, which is either a config + dict or a list of config dicts. If cfg is a list, the built + modules will be wrapped with ``nn.Sequential``. + registry (:obj:`Registry`): A registry the module belongs to. + default_args (dict, optional): Default arguments to build the module. + Defaults to None. + + Returns: + nn.Module: A built nn.Module. + """ + from ..model.base_module import Sequential + if isinstance(cfg, list): + modules = [ + build_from_cfg(_cfg, registry, default_args) for _cfg in cfg + ] + return Sequential(*modules) + else: + return build_from_cfg(cfg, registry, default_args) + + +class SyncBatchNorm(torch.nn.SyncBatchNorm): # type: ignore + + def _check_input_dim(self, input): + if TORCH_VERSION == 'parrots': + if input.dim() < 2: + raise ValueError( + f'expected at least 2D input (got {input.dim()}D input)') + else: + super()._check_input_dim(input) \ No newline at end of file diff --git a/segformer_plusplus/utils/embed.py b/segformer_plusplus/utils/embed.py new file mode 100644 index 0000000000000000000000000000000000000000..0cfeb32bc108c3c02f7452c01b1cf5f6ab5bf516 --- /dev/null +++ b/segformer_plusplus/utils/embed.py @@ -0,0 +1,217 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import math +from typing import Sequence +from itertools import repeat +import collections.abc +import torch.nn as nn +import torch.nn.functional as F + +from ..model.base_module import BaseModule +from .activation import build_conv_layer, build_norm_layer + + +class AdaptivePadding(nn.Module): + """Applies padding to input (if needed) so that input can get fully covered + by filter you specified. It supports two modes "same" and "corner". The + "same" mode is same with "SAME" padding mode in TensorFlow, pad zero around + input. The "corner" mode would pad zero to bottom right. + + Args: + kernel_size (int | tuple): Size of the kernel: + stride (int | tuple): Stride of the filter. Default: 1: + dilation (int | tuple): Spacing between kernel elements. + Default: 1. + padding (str): Support "same" and "corner", "corner" mode + would pad zero to bottom right, and "same" mode would + pad zero around input. Default: "corner". 
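    Note:
        For stride ``s``, kernel size ``k`` and dilation ``d``, the pad amount
        per spatial dim is ``max((ceil(in / s) - 1) * s + (k - 1) * d + 1 - in, 0)``,
        i.e. just enough for the filter to cover the input completely.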
+ Example: + >>> kernel_size = 16 + >>> stride = 16 + >>> dilation = 1 + >>> input = torch.rand(1, 1, 15, 17) + >>> adap_pad = AdaptivePadding( + >>> kernel_size=kernel_size, + >>> stride=stride, + >>> dilation=dilation, + >>> padding="corner") + >>> out = adap_pad(input) + >>> assert (out.shape[2], out.shape[3]) == (16, 32) + >>> input = torch.rand(1, 1, 16, 17) + >>> out = adap_pad(input) + >>> assert (out.shape[2], out.shape[3]) == (16, 32) + """ + + def __init__(self, kernel_size=1, stride=1, dilation=1, padding='corner'): + + super().__init__() + + assert padding in ('same', 'corner') + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + self.padding = padding + self.kernel_size = kernel_size + self.stride = stride + self.dilation = dilation + + def get_pad_shape(self, input_shape): + input_h, input_w = input_shape + kernel_h, kernel_w = self.kernel_size + stride_h, stride_w = self.stride + output_h = math.ceil(input_h / stride_h) + output_w = math.ceil(input_w / stride_w) + pad_h = max((output_h - 1) * stride_h + + (kernel_h - 1) * self.dilation[0] + 1 - input_h, 0) + pad_w = max((output_w - 1) * stride_w + + (kernel_w - 1) * self.dilation[1] + 1 - input_w, 0) + return pad_h, pad_w + + def forward(self, x): + pad_h, pad_w = self.get_pad_shape(x.size()[-2:]) + if pad_h > 0 or pad_w > 0: + if self.padding == 'corner': + x = F.pad(x, [0, pad_w, 0, pad_h]) + elif self.padding == 'same': + x = F.pad(x, [ + pad_w // 2, pad_w - pad_w // 2, pad_h // 2, + pad_h - pad_h // 2 + ]) + return x + + +class PatchEmbed(BaseModule): + """Image to Patch Embedding. + + We use a conv layer to implement PatchEmbed. + + Args: + in_channels (int): The num of input channels. Default: 3 + embed_dims (int): The dimensions of embedding. Default: 768 + conv_type (str): The config dict for embedding + conv layer type selection. Default: "Conv2d". + kernel_size (int): The kernel_size of embedding conv. Default: 16. + stride (int, optional): The slide stride of embedding conv. + Default: None (Would be set as `kernel_size`). + padding (int | tuple | string ): The padding length of + embedding conv. When it is a string, it means the mode + of adaptive padding, support "same" and "corner" now. + Default: "corner". + dilation (int): The dilation rate of embedding conv. Default: 1. + bias (bool): Bias of embed conv. Default: True. + norm_cfg (dict, optional): Config dict for normalization layer. + Default: None. + input_size (int | tuple | None): The size of input, which will be + used to calculate the out size. Only work when `dynamic_size` + is False. Default: None. + init_cfg (`mmengine.ConfigDict`, optional): The Config for + initialization. Default: None. 
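    Example (illustrative; output shapes follow from the arguments shown):
        >>> import torch
        >>> patch_embed = PatchEmbed(
        >>>     in_channels=3, embed_dims=64, kernel_size=7, stride=4,
        >>>     padding='corner')
        >>> x, out_size = patch_embed(torch.rand(1, 3, 224, 224))
        >>> x.shape, out_size
        (torch.Size([1, 3136, 64]), (56, 56))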
+ """ + + def __init__(self, + in_channels=3, + embed_dims=768, + conv_type='Conv2d', + kernel_size=16, + stride=None, + padding='corner', + dilation=1, + bias=True, + norm_cfg=None, + input_size=None, + init_cfg=None): + super().__init__(init_cfg=init_cfg) + + self.embed_dims = embed_dims + if stride is None: + stride = kernel_size + + kernel_size = to_2tuple(kernel_size) + stride = to_2tuple(stride) + dilation = to_2tuple(dilation) + + if isinstance(padding, str): + self.adap_padding = AdaptivePadding( + kernel_size=kernel_size, + stride=stride, + dilation=dilation, + padding=padding) + # disable the padding of conv + padding = 0 + else: + self.adap_padding = None + padding = to_2tuple(padding) + + self.projection = build_conv_layer( + dict(type=conv_type), + in_channels=in_channels, + out_channels=embed_dims, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + bias=bias) + + if norm_cfg is not None: + self.norm = build_norm_layer(norm_cfg, embed_dims)[1] + else: + self.norm = None + + if input_size: + input_size = to_2tuple(input_size) + # `init_out_size` would be used outside to + # calculate the num_patches + # when `use_abs_pos_embed` outside + self.init_input_size = input_size + if self.adap_padding: + pad_h, pad_w = self.adap_padding.get_pad_shape(input_size) + input_h, input_w = input_size + input_h = input_h + pad_h + input_w = input_w + pad_w + input_size = (input_h, input_w) + + # https://pytorch.org/docs/stable/generated/torch.nn.Conv2d.html + h_out = (input_size[0] + 2 * padding[0] - dilation[0] * + (kernel_size[0] - 1) - 1) // stride[0] + 1 + w_out = (input_size[1] + 2 * padding[1] - dilation[1] * + (kernel_size[1] - 1) - 1) // stride[1] + 1 + self.init_out_size = (h_out, w_out) + else: + self.init_input_size = None + self.init_out_size = None + + def forward(self, x): + """ + Args: + x (Tensor): Has shape (B, C, H, W). In most case, C is 3. + + Returns: + tuple: Contains merged results and its spatial shape. + + - x (Tensor): Has shape (B, out_h * out_w, embed_dims) + - out_size (tuple[int]): Spatial shape of x, arrange as + (out_h, out_w). 
+ """ + + if self.adap_padding: + x = self.adap_padding(x) + + x = self.projection(x) + out_size = (x.shape[2], x.shape[3]) + x = x.flatten(2).transpose(1, 2) + if self.norm is not None: + x = self.norm(x) + return x, out_size + +# From PyTorch internals +def _ntuple(n): + + def parse(x): + if isinstance(x, collections.abc.Iterable): + return x + return tuple(repeat(x, n)) + + return parse + +to_2tuple = _ntuple(2) diff --git a/segformer_plusplus/utils/imagenet_weights.py b/segformer_plusplus/utils/imagenet_weights.py new file mode 100644 index 0000000000000000000000000000000000000000..e9ef1d12b01027e22b2053c796f12a9a2e554ef5 --- /dev/null +++ b/segformer_plusplus/utils/imagenet_weights.py @@ -0,0 +1,8 @@ +imagenet_weights = { + 'b0': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b0_20220624-7e0fe6dd.pth', + 'b1': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b1_20220624-02e5a6a1.pth', + 'b2': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b2_20220624-66e8bf70.pth', + 'b3': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b3_20220624-13b1141c.pth', + 'b4': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b4_20220624-d588d980.pth', + 'b5': 'https://download.openmmlab.com/mmsegmentation/v0.5/pretrain/segformer/mit_b5_20220624-658746d9.pth' +} \ No newline at end of file diff --git a/segformer_plusplus/utils/manager.py b/segformer_plusplus/utils/manager.py new file mode 100644 index 0000000000000000000000000000000000000000..33482836c748d9fedab64762d30094fe174565ef --- /dev/null +++ b/segformer_plusplus/utils/manager.py @@ -0,0 +1,149 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import inspect +import threading +import warnings +from collections import OrderedDict +from typing import Type, TypeVar + +_lock = threading.RLock() +T = TypeVar('T') + + +def _accquire_lock() -> None: + """Acquire the module-level lock for serializing access to shared data. + + This should be released with _release_lock(). + """ + if _lock: + _lock.acquire() + + +def _release_lock() -> None: + """Release the module-level lock acquired by calling _accquire_lock().""" + if _lock: + _lock.release() + + +class ManagerMeta(type): + """The metaclass for global accessible class. + + The subclasses inheriting from ``ManagerMeta`` will manage their + own ``_instance_dict`` and root instances. The constructors of subclasses + must contain the ``name`` argument. + + Examples: + >>> class SubClass1(metaclass=ManagerMeta): + >>> def __init__(self, *args, **kwargs): + >>> pass + AssertionError: .__init__ must have the + name argument. + >>> class SubClass2(metaclass=ManagerMeta): + >>> def __init__(self, name): + >>> pass + >>> # valid format. + """ + + def __init__(cls, *args): + cls._instance_dict = OrderedDict() + params = inspect.getfullargspec(cls) + params_names = params[0] if params[0] else [] + assert 'name' in params_names, f'{cls} must have the `name` argument' + super().__init__(*args) + + +class ManagerMixin(metaclass=ManagerMeta): + """``ManagerMixin`` is the base class for classes that have global access + requirements. + + The subclasses inheriting from ``ManagerMixin`` can get their + global instances. 
+ + Examples: + >>> class GlobalAccessible(ManagerMixin): + >>> def __init__(self, name=''): + >>> super().__init__(name) + >>> + >>> GlobalAccessible.get_instance('name') + >>> instance_1 = GlobalAccessible.get_instance('name') + >>> instance_2 = GlobalAccessible.get_instance('name') + >>> assert id(instance_1) == id(instance_2) + + Args: + name (str): Name of the instance. Defaults to ''. + """ + + def __init__(self, name: str = ''): + assert isinstance(name, str) and name, \ + 'name argument must be an non-empty string.' + self._instance_name = name + + @classmethod + def get_instance(cls: Type[T], name: str, **kwargs) -> T: + """Get subclass instance by name if the name exists. + + If corresponding name instance has not been created, ``get_instance`` + will create an instance, otherwise ``get_instance`` will return the + corresponding instance. + + Examples + >>> instance1 = GlobalAccessible.get_instance('name1') + >>> # Create name1 instance. + >>> instance.instance_name + name1 + >>> instance2 = GlobalAccessible.get_instance('name1') + >>> # Get name1 instance. + >>> assert id(instance1) == id(instance2) + + Args: + name (str): Name of instance. Defaults to ''. + + Returns: + object: Corresponding name instance, the latest instance, or root + instance. + """ + _accquire_lock() + assert isinstance(name, str), \ + f'type of name should be str, but got {type(cls)}' + instance_dict = cls._instance_dict # type: ignore + # Get the instance by name. + if name not in instance_dict: + instance = cls(name=name, **kwargs) # type: ignore + instance_dict[name] = instance # type: ignore + elif kwargs: + warnings.warn( + f'{cls} instance named of {name} has been created, ' + 'the method `get_instance` should not accept any other ' + 'arguments') + # Get latest instantiated instance or root instance. + _release_lock() + return instance_dict[name] + + @classmethod + def get_current_instance(cls): + """Get latest created instance. + + Before calling ``get_current_instance``, The subclass must have called + ``get_instance(xxx)`` at least once. + + Examples + >>> instance = GlobalAccessible.get_current_instance() + AssertionError: At least one of name and current needs to be set + >>> instance = GlobalAccessible.get_instance('name1') + >>> instance.instance_name + name1 + >>> instance = GlobalAccessible.get_current_instance() + >>> instance.instance_name + name1 + + Returns: + object: Latest created instance. + """ + _accquire_lock() + if not cls._instance_dict: + raise RuntimeError( + f'Before calling {cls.__name__}.get_current_instance(), you ' + 'should call get_instance(name=xxx) at least once.') + name = next(iter(reversed(cls._instance_dict))) + _release_lock() + return cls._instance_dict[name] + diff --git a/segformer_plusplus/utils/registry.py b/segformer_plusplus/utils/registry.py new file mode 100644 index 0000000000000000000000000000000000000000..dbe5984fb753bb466ad50fdae86314a15601910a --- /dev/null +++ b/segformer_plusplus/utils/registry.py @@ -0,0 +1,7 @@ + +from ..Registry.registry import Registry + +MODELS = Registry( + 'models', + locations=['segformer_plusplus.model.backbone', 'segformer_plusplus.model.head'] +) diff --git a/segformer_plusplus/utils/shape_convert.py b/segformer_plusplus/utils/shape_convert.py new file mode 100644 index 0000000000000000000000000000000000000000..cce1e220b645d4b02df1ec2d9ed3137c8acba707 --- /dev/null +++ b/segformer_plusplus/utils/shape_convert.py @@ -0,0 +1,107 @@ +# Copyright (c) OpenMMLab. All rights reserved. 
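# Quick illustration of the round trip implemented below (requires torch):
#   >>> x = torch.rand(2, 16, 5, 5)   # [N, C, H, W]
#   >>> seq = nchw_to_nlc(x)          # shape [2, 25, 16], i.e. [N, L, C] with L = H * W
#   >>> nlc_to_nchw(seq, (5, 5)).shape
#   torch.Size([2, 16, 5, 5])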
+def nlc_to_nchw(x, hw_shape): + """Convert [N, L, C] shape tensor to [N, C, H, W] shape tensor. + + Args: + x (Tensor): The input tensor of shape [N, L, C] before conversion. + hw_shape (Sequence[int]): The height and width of output feature map. + + Returns: + Tensor: The output tensor of shape [N, C, H, W] after conversion. + """ + H, W = hw_shape + assert len(x.shape) == 3 + B, L, C = x.shape + assert L == H * W, 'The seq_len doesn\'t match H, W' + return x.transpose(1, 2).reshape(B, C, H, W) + + +def nchw_to_nlc(x): + """Flatten [N, C, H, W] shape tensor to [N, L, C] shape tensor. + + Args: + x (Tensor): The input tensor of shape [N, C, H, W] before conversion. + + Returns: + Tensor: The output tensor of shape [N, L, C] after conversion. + """ + assert len(x.shape) == 4 + return x.flatten(2).transpose(1, 2).contiguous() + + +def nchw2nlc2nchw(module, x, contiguous=False, **kwargs): + """Flatten [N, C, H, W] shape tensor `x` to [N, L, C] shape tensor. Use the + reshaped tensor as the input of `module`, and the convert the output of + `module`, whose shape is. + + [N, L, C], to [N, C, H, W]. + + Args: + module (Callable): A callable object the takes a tensor + with shape [N, L, C] as input. + x (Tensor): The input tensor of shape [N, C, H, W]. + contiguous: + contiguous (Bool): Whether to make the tensor contiguous + after each shape transform. + + Returns: + Tensor: The output tensor of shape [N, C, H, W]. + + Example: + >>> import torch + >>> import torch.nn as nn + >>> norm = nn.LayerNorm(4) + >>> feature_map = torch.rand(4, 4, 5, 5) + >>> output = nchw2nlc2nchw(norm, feature_map) + """ + B, C, H, W = x.shape + if not contiguous: + x = x.flatten(2).transpose(1, 2) + x = module(x, **kwargs) + x = x.transpose(1, 2).reshape(B, C, H, W) + else: + x = x.flatten(2).transpose(1, 2).contiguous() + x = module(x, **kwargs) + x = x.transpose(1, 2).reshape(B, C, H, W).contiguous() + return x + + +def nlc2nchw2nlc(module, x, hw_shape, contiguous=False, **kwargs): + """Convert [N, L, C] shape tensor `x` to [N, C, H, W] shape tensor. Use the + reshaped tensor as the input of `module`, and convert the output of + `module`, whose shape is. + + [N, C, H, W], to [N, L, C]. + + Args: + module (Callable): A callable object the takes a tensor + with shape [N, C, H, W] as input. + x (Tensor): The input tensor of shape [N, L, C]. + hw_shape: (Sequence[int]): The height and width of the + feature map with shape [N, C, H, W]. + contiguous (Bool): Whether to make the tensor contiguous + after each shape transform. + + Returns: + Tensor: The output tensor of shape [N, L, C]. 
+ + Example: + >>> import torch + >>> import torch.nn as nn + >>> conv = nn.Conv2d(16, 16, 3, 1, 1) + >>> feature_map = torch.rand(4, 25, 16) + >>> output = nlc2nchw2nlc(conv, feature_map, (5, 5)) + """ + H, W = hw_shape + assert len(x.shape) == 3 + B, L, C = x.shape + assert L == H * W, 'The seq_len doesn\'t match H, W' + if not contiguous: + x = x.transpose(1, 2).reshape(B, C, H, W) + x = module(x, **kwargs) + x = x.flatten(2).transpose(1, 2) + else: + x = x.transpose(1, 2).reshape(B, C, H, W).contiguous() + x = module(x, **kwargs) + x = x.flatten(2).transpose(1, 2).contiguous() + return x diff --git a/segformer_plusplus/utils/tome_presets.py b/segformer_plusplus/utils/tome_presets.py new file mode 100644 index 0000000000000000000000000000000000000000..152c88689834f48338198657974de42db406957b --- /dev/null +++ b/segformer_plusplus/utils/tome_presets.py @@ -0,0 +1,20 @@ +tome_presets = { + 'bsm_hq': [ + dict(q_mode=None, kv_mode='bsm', kv_r=0.6, kv_sx=2, kv_sy=2), + dict(q_mode=None, kv_mode='bsm', kv_r=0.6, kv_sx=2, kv_sy=2), + dict(q_mode='bsm', kv_mode=None, q_r=0.8, q_sx=4, q_sy=4), + dict(q_mode='bsm', kv_mode=None, q_r=0.8, q_sx=4, q_sy=4) + ], + 'bsm_fast': [ + dict(q_mode=None, kv_mode='bsm_r2D', kv_r=0.9, kv_sx=4, kv_sy=4), + dict(q_mode=None, kv_mode='bsm_r2D', kv_r=0.9, kv_sx=4, kv_sy=4), + dict(q_mode='bsm_r2D', kv_mode=None, q_r=0.9, q_sx=4, q_sy=4), + dict(q_mode='bsm_r2D', kv_mode=None, q_r=0.9, q_sx=4, q_sy=4) + ], + 'n2d_2x2': [ + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)), + dict(q_mode='neighbor_2D', kv_mode=None, q_s=(2, 2)) + ] +} diff --git a/segformer_plusplus/utils/version_utils.py b/segformer_plusplus/utils/version_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e4b43624fa811026d32bc86c73ace88df594ade7 --- /dev/null +++ b/segformer_plusplus/utils/version_utils.py @@ -0,0 +1,64 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import os +import subprocess +import warnings + +from packaging.version import parse + + +def digit_version(version_str: str, length: int = 4): + """Convert a version string into a tuple of integers. + + This method is usually used for comparing two versions. For pre-release + versions: alpha < beta < rc. + + Args: + version_str (str): The version string. + length (int): The maximum number of version levels. Defaults to 4. + + Returns: + tuple[int]: The version info in digits (integers). 
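    Example (illustrative):
        >>> digit_version('1.13.1')
        (1, 13, 1, 0, 0, 0)
        >>> digit_version('2.0.0rc1')
        (2, 0, 0, 0, -1, 1)
        >>> digit_version('2.0.0rc1') < digit_version('2.0.0')
        True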
+ """ + assert 'parrots' not in version_str + version = parse(version_str) + assert version.release, f'failed to parse version {version_str}' + release = list(version.release) + release = release[:length] + if len(release) < length: + release = release + [0] * (length - len(release)) + if version.is_prerelease: + mapping = {'a': -3, 'b': -2, 'rc': -1} + val = -4 + # version.pre can be None + if version.pre: + if version.pre[0] not in mapping: + warnings.warn(f'unknown prerelease version {version.pre[0]}, ' + 'version checking may go wrong') + else: + val = mapping[version.pre[0]] + release.extend([val, version.pre[-1]]) + else: + release.extend([val, 0]) + + elif version.is_postrelease: + release.extend([1, version.post]) # type: ignore + else: + release.extend([0, 0]) + return tuple(release) + + +def _minimal_ext_cmd(cmd): + # construct minimal environment + env = {} + for k in ['SYSTEMROOT', 'PATH', 'HOME']: + v = os.environ.get(k) + if v is not None: + env[k] = v + # LANGUAGE is used on win32 + env['LANGUAGE'] = 'C' + env['LANG'] = 'C' + env['LC_ALL'] = 'C' + out, err = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + env=env).communicate() + return out diff --git a/segformer_plusplus/utils/wrappers.py b/segformer_plusplus/utils/wrappers.py new file mode 100644 index 0000000000000000000000000000000000000000..183f0fc4912ea211f4de702c20e4f7b858367f12 --- /dev/null +++ b/segformer_plusplus/utils/wrappers.py @@ -0,0 +1,109 @@ +# Copyright (c) OpenMMLab. All rights reserved. +import warnings +import torch +import torch.nn as nn +import torch.nn.functional as F + +from .registry import MODELS + + +TORCH_VERSION = torch.__version__ + +def resize(input, + size=None, + scale_factor=None, + mode='nearest', + align_corners=None, + warning=True): + if warning: + if size is not None and align_corners: + input_h, input_w = tuple(int(x) for x in input.shape[2:]) + output_h, output_w = tuple(int(x) for x in size) + if output_h > input_h or output_w > output_h: + if ((output_h > 1 and output_w > 1 and input_h > 1 + and input_w > 1) and (output_h - 1) % (input_h - 1) + and (output_w - 1) % (input_w - 1)): + warnings.warn( + f'When align_corners={align_corners}, ' + 'the output would more aligned if ' + f'input size {(input_h, input_w)} is `x+1` and ' + f'out size {(output_h, output_w)} is `nx+1`') + return F.interpolate(input, size, scale_factor, mode, align_corners) + + +@MODELS.register_module('Conv', force=True) +class Conv2d(nn.Conv2d): + + def forward(self, x: torch.Tensor) -> torch.Tensor: + if x.numel() == 0 and obsolete_torch_version(TORCH_VERSION, (1, 4)): + out_shape = [x.shape[0], self.out_channels] + for i, k, p, s, d in zip(x.shape[-2:], self.kernel_size, + self.padding, self.stride, self.dilation): + o = (i + 2 * p - (d * (k - 1) + 1)) // s + 1 + out_shape.append(o) + empty = NewEmptyTensorOp.apply(x, out_shape) + if self.training: + # produce dummy gradient to avoid DDP warning. 
+ dummy = sum(x.view(-1)[0] for x in self.parameters()) * 0.0 + return empty + dummy + else: + return empty + + return super().forward(x) + + +class NewEmptyTensorOp(torch.autograd.Function): + + @staticmethod + def forward(ctx, x: torch.Tensor, new_shape: tuple) -> torch.Tensor: + ctx.shape = x.shape + return x.new_empty(new_shape) + + @staticmethod + def backward(ctx, grad: torch.Tensor) -> tuple: + shape = ctx.shape + return NewEmptyTensorOp.apply(grad, shape), None + + +def obsolete_torch_version(torch_version, version_threshold) -> bool: + return torch_version == 'parrots' or torch_version <= version_threshold + + +@MODELS.register_module() +class DropPath(nn.Module): + """Drop paths (Stochastic Depth) per sample (when applied in main path of + residual blocks). + + We follow the implementation + https://github.com/rwightman/pytorch-image-models/blob/a2727c1bf78ba0d7b5727f5f95e37fb7f8866b1f/timm/models/layers/drop.py # noqa: E501 + + Args: + drop_prob (float): Probability of the path to be zeroed. Default: 0.1 + """ + + def __init__(self, drop_prob: float = 0.1): + super().__init__() + self.drop_prob = drop_prob + + def forward(self, x: torch.Tensor) -> torch.Tensor: + return drop_path(x, self.drop_prob, self.training) + + +def drop_path(x: torch.Tensor, + drop_prob: float = 0., + training: bool = False) -> torch.Tensor: + """Drop paths (Stochastic Depth) per sample (when applied in main path of + residual blocks). + + We follow the implementation + https://github.com/rwightman/pytorch-image-models/blob/a2727c1bf78ba0d7b5727f5f95e37fb7f8866b1f/timm/models/layers/drop.py # noqa: E501 + """ + if drop_prob == 0. or not training: + return x + keep_prob = 1 - drop_prob + # handle tensors with different dimensions, not just 4D tensors. + shape = (x.shape[0], ) + (1, ) * (x.ndim - 1) + random_tensor = keep_prob + torch.rand( + shape, dtype=x.dtype, device=x.device) + output = x.div(keep_prob) * random_tensor.floor() + return output \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..c07b49e770642b514e773574902828a1c0c699ed --- /dev/null +++ b/setup.py @@ -0,0 +1,12 @@ +from setuptools import find_packages, setup + +setup( + name="segformer_plusplus", + version="0.2", + author="Marco Kantonis", + description="Segformer++: Efficient Token-Merging Strategies for High-Resolution Semantic Segmentation", + install_requires=['torch>=2.0.1', 'tomesd','omegaconf', 'pyyaml'], + packages=find_packages(), + license='MIT', + long_description="https://arxiv.org/abs/2405.14467" +)
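A minimal sketch of how the benchmark utility from
segformer_plusplus/utils/benchmark.py could be exercised after an editable
install (`pip install -e .`); the toy model, input size and run count below are
placeholders, not values taken from the patch:

    import torch
    import torch.nn as nn

    from segformer_plusplus.utils import benchmark

    # stand-in model; any nn.Module whose forward takes a single image tensor works
    model = nn.Sequential(
        nn.Conv2d(3, 16, 3, stride=2, padding=1),
        nn.ReLU(inplace=True),
        nn.Conv2d(16, 8, 3, stride=2, padding=1),
    )

    device = 'cuda' if torch.cuda.is_available() else 'cpu'
    fps = benchmark(
        model,
        device=device,
        input_size=(3, 512, 512),
        batch_size=2,
        runs=10,
        verbose=True,
    )
    print(f'{fps:.1f} im/s')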