# Recovered from a serialized dnnlib.tflib.Network ('G', build_func_name
# 'G_synthesis_stylegan2'; static_kwargs: num_channels, resolution, label_size,
# resolution_h, resolution_w). The pickle's binary variable data (the per-layer
# 'noise*' buffers) is not reproduced here; the embedded build_module_src follows.

# Copyright (c) 2019, NVIDIA Corporation. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, visit
# https://nvlabs.github.io/stylegan2/license.html

"""Network architectures used in the StyleGAN2 paper."""

import os
import functools
import numpy as np
import tensorflow as tf
import dnnlib
import dnnlib.tflib as tflib
from dnnlib.tflib.ops.upfirdn_2d import upsample_2d, downsample_2d, upsample_conv_2d, conv_downsample_2d
from dnnlib.tflib.ops.fused_bias_act import fused_bias_act
from dnnlib.tflib.autosummary import autosummary  #, autoimages

# NOTE: Do not import any application-specific modules here!
# Specify all network parameters as kwargs.

# NCHW <-> NHWC transpose helpers.
def _i(x): return tf.transpose(x, [0,2,3,1])
def _o(x): return tf.transpose(x, [0,3,1,2])

#----------------------------------------------------------------------------
# Get/create weight tensor for a convolution or fully-connected layer.

def get_weight(shape, gain=1, use_wscale=True, lrmul=1, weight_var='weight'):
    fan_in = np.prod(shape[:-1])  # [kernel, kernel, fmaps_in, fmaps_out] or [in, out]
    he_std = gain / np.sqrt(fan_in)  # He init

    # Equalized learning rate and custom learning rate multiplier.
    if use_wscale:
        init_std = 1.0 / lrmul
        runtime_coef = he_std * lrmul
    else:
        init_std = he_std / lrmul
        runtime_coef = lrmul

    # Create variable.
    init = tf.initializers.random_normal(0, init_std)
    return tf.get_variable(weight_var, shape=shape, initializer=init, use_resource=True) * runtime_coef

#----------------------------------------------------------------------------
# Fully-connected layer.

def dense_layer(x, fmaps, gain=1, use_wscale=True, lrmul=1, weight_var='weight'):
    if len(x.shape) > 2:
        x = tf.reshape(x, [-1, np.prod([d.value for d in x.shape[1:]])])
    w = get_weight([x.shape[1].value, fmaps], gain=gain, use_wscale=use_wscale, lrmul=lrmul, weight_var=weight_var)
    w = tf.cast(w, x.dtype)
    return tf.matmul(x, w)

#----------------------------------------------------------------------------
# Convolution layer with optional upsampling or downsampling.

def conv2d_layer(x, fmaps, kernel, up=False, down=False, resample_kernel=None, gain=1, use_wscale=True, lrmul=1, weight_var='weight'):
    assert not (up and down)
    assert kernel >= 1 and kernel % 2 == 1
    w = graph_spectral_norm(get_weight([kernel, kernel, x.shape[1].value, fmaps], gain=gain, use_wscale=use_wscale, lrmul=lrmul, weight_var=weight_var))
    if up:
        x = _o(upsample_conv_2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', k=resample_kernel))
    elif down:
        x = _o(conv_downsample_2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', k=resample_kernel))
    else:
        x = _o(tf.nn.conv2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', strides=[1,1,1,1], padding='SAME'))
    return x

#----------------------------------------------------------------------------
# Apply bias and activation func.

def apply_bias_act(x, act='linear', alpha=None, gain=None, lrmul=1, bias_var='bias'):
    b = tf.get_variable(bias_var, shape=[x.shape[1]], initializer=tf.initializers.zeros(), use_resource=True) * lrmul
    return fused_bias_act(x, b=tf.cast(b, x.dtype), act=act, alpha=alpha, gain=gain)
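#----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original build functions): shows
# how the primitives above compose into one discriminator-style conv block,
# mirroring the D blocks defined later in this file. The scope names and
# feature-map counts are placeholders chosen here for illustration only.

def _example_conv_block(x):
    # x is assumed to be an [N, C, H, W] float tensor inside an active TF1 graph.
    with tf.variable_scope('ExampleBlock'):
        with tf.variable_scope('Conv0'):
            x = apply_bias_act(conv2d_layer(x, fmaps=64, kernel=3), act='lrelu')
        with tf.variable_scope('Conv1_down'):
            # Downsample 2x while convolving, low-pass filtering with the usual [1,3,3,1] kernel.
            x = apply_bias_act(conv2d_layer(x, fmaps=128, kernel=3, down=True, resample_kernel=[1,3,3,1]), act='lrelu')
    return x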
#----------------------------------------------------------------------------
# Naive upsampling (nearest neighbor) and downsampling (average pooling).

def naive_upsample_2d(x, factor=2):
    with tf.variable_scope('NaiveUpsample'):
        _N, C, H, W = x.shape.as_list()
        x = tf.reshape(x, [-1, C, H, 1, W, 1])
        x = tf.tile(x, [1, 1, 1, factor, 1, factor])
        return tf.reshape(x, [-1, C, H * factor, W * factor])

def naive_downsample_2d(x, factor=2):
    with tf.variable_scope('NaiveDownsample'):
        _N, C, H, W = x.shape.as_list()
        x = tf.reshape(x, [-1, C, H // factor, factor, W // factor, factor])
        return tf.reduce_mean(x, axis=[3,5])

#----------------------------------------------------------------------------
# Modulated convolution layer.

def modulated_conv2d_layer(x, y, fmaps, kernel, up=False, down=False, demodulate=True, resample_kernel=None, gain=1, use_wscale=True, lrmul=1, fused_modconv=True, weight_var='weight', mod_weight_var='mod_weight', mod_bias_var='mod_bias'):
    assert not (up and down)
    assert kernel >= 1 and kernel % 2 == 1

    # Get weight.
    w = graph_spectral_norm(get_weight([kernel, kernel, x.shape[1].value, fmaps], gain=gain, use_wscale=use_wscale, lrmul=lrmul, weight_var=weight_var))
    ww = w[np.newaxis]  # [BkkIO] Introduce minibatch dimension.

    # Modulate.
    s = dense_layer(y, fmaps=x.shape[1].value, weight_var=mod_weight_var)  # [BI] Transform incoming W to style.
    s = apply_bias_act(s, bias_var=mod_bias_var) + 1  # [BI] Add bias (initially 1).
    ww *= tf.cast(s[:, np.newaxis, np.newaxis, :, np.newaxis], w.dtype)  # [BkkIO] Scale input feature maps.

    # Demodulate.
    if demodulate:
        d = tf.rsqrt(tf.reduce_sum(tf.square(ww), axis=[1,2,3]) + 1e-8)  # [BO] Scaling factor.
        ww *= d[:, np.newaxis, np.newaxis, np.newaxis, :]  # [BkkIO] Scale output feature maps.

    # Reshape/scale input.
    if fused_modconv:
        x = tf.reshape(x, [1, -1, x.shape[2], x.shape[3]])  # Fused => reshape minibatch to convolution groups.
        w = tf.reshape(tf.transpose(ww, [1, 2, 3, 0, 4]), [ww.shape[1], ww.shape[2], ww.shape[3], -1])
    else:
        x *= tf.cast(s[:, :, np.newaxis, np.newaxis], x.dtype)  # [BIhw] Not fused => scale input activations.

    # Convolution with optional up/downsampling.
    if up:
        x = _o(upsample_conv_2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', k=resample_kernel))
    elif down:
        x = _o(conv_downsample_2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', k=resample_kernel))
    else:
        x = _o(tf.nn.conv2d(_i(x), tf.cast(w, x.dtype), data_format='NHWC', strides=[1,1,1,1], padding='SAME'))

    # Reshape/scale output.
    if fused_modconv:
        x = tf.reshape(x, [-1, fmaps, x.shape[2], x.shape[3]])  # Fused => reshape convolution groups back to minibatch.
    elif demodulate:
        x *= tf.cast(d[:, :, np.newaxis, np.newaxis], x.dtype)  # [BOhw] Not fused => scale output activations.
    return x
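#----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original build functions): a single
# style-modulated convolution as used by the synthesis layers further below.
# The scope name and feature-map count are placeholders for illustration.

def _example_modulated_conv(x, dlatent):
    # x: [N, C, H, W] feature maps; dlatent: [N, dlatent_size] style vector for this layer.
    with tf.variable_scope('ExampleModConv'):
        # Modulate/demodulate the 3x3 kernel with the style, upsample 2x, then bias + lrelu.
        x = modulated_conv2d_layer(x, dlatent, fmaps=512, kernel=3, up=True, resample_kernel=[1,3,3,1])
        x = apply_bias_act(x, act='lrelu')
    return x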
#----------------------------------------------------------------------------
# Minibatch standard deviation layer.

def minibatch_stddev_layer(x, group_size=4, num_new_features=1):
    group_size = tf.minimum(group_size, tf.shape(x)[0])  # Minibatch must be divisible by (or smaller than) group_size.
    s = x.shape                                          # [NCHW]   Input shape.
    y = tf.reshape(x, [group_size, -1, num_new_features, s[1]//num_new_features, s[2], s[3]])  # [GMncHW] Split minibatch into M groups of size G. Split channels into n channel groups c.
    y = tf.cast(y, tf.float32)                           # [GMncHW] Cast to FP32.
    y -= tf.reduce_mean(y, axis=0, keepdims=True)        # [GMncHW] Subtract mean over group.
    y = tf.reduce_mean(tf.square(y), axis=0)             # [MncHW]  Calc variance over group.
    y = tf.sqrt(y + 1e-8)                                # [MncHW]  Calc stddev over group.
    y = tf.reduce_mean(y, axis=[2,3,4], keepdims=True)   # [Mn111]  Take average over fmaps and pixels.
    y = tf.reduce_mean(y, axis=[2])                      # [Mn11]   Split channels into c channel groups.
    y = tf.cast(y, x.dtype)                              # [Mn11]   Cast back to original data type.
    y = tf.tile(y, [group_size, 1, s[2], s[3]])          # [NnHW]   Replicate over group and pixels.
    return tf.concat([x, y], axis=1)                     # [NCHW]   Append as new fmap.

#----------------------------------------------------------------------------
# Main generator network.
# Composed of two sub-networks (mapping and synthesis) that are defined below.
# Used in configs B-F (Table 1).

def G_main(
    latents_in,                                         # First input: Latent vectors (Z) [minibatch, latent_size].
    labels_in,                                          # Second input: Conditioning labels [minibatch, label_size].
    truncation_psi          = 0.5,                      # Style strength multiplier for the truncation trick. None = disable.
    truncation_cutoff       = None,                     # Number of layers for which to apply the truncation trick. None = disable.
    truncation_psi_val      = None,                     # Value for truncation_psi to use during validation.
    truncation_cutoff_val   = None,                     # Value for truncation_cutoff to use during validation.
    dlatent_avg_beta        = 0.995,                    # Decay for tracking the moving average of W during training. None = disable.
    style_mixing_prob       = 0.9,                      # Probability of mixing styles during training. None = disable.
    is_training             = False,                    # Network is under training? Enables and disables specific features.
    is_validation           = False,                    # Network is under validation? Chooses which value to use for truncation_psi.
    return_dlatents         = False,                    # Return dlatents in addition to the images?
    is_template_graph       = False,                    # True = template graph constructed by the Network class, False = actual evaluation.
    components              = dnnlib.EasyDict(),        # Container for sub-networks. Retained between calls.
    mapping_func            = 'G_mapping',              # Build func name for the mapping network.
    synthesis_func          = 'G_synthesis_stylegan2',  # Build func name for the synthesis network.
    **kwargs):                                          # Arguments for sub-networks (mapping and synthesis).

    # NOTE: style mixing regularization is disabled in this variant.
    style_mixing_prob = None

    # Validate arguments.
    assert not is_training or not is_validation
    assert isinstance(components, dnnlib.EasyDict)
    if is_validation:
        truncation_psi = truncation_psi_val
        truncation_cutoff = truncation_cutoff_val
    if is_training or (truncation_psi is not None and not tflib.is_tf_expression(truncation_psi) and truncation_psi == 1):
        truncation_psi = None
    if is_training:
        truncation_cutoff = None
    if not is_training or (dlatent_avg_beta is not None and not tflib.is_tf_expression(dlatent_avg_beta) and dlatent_avg_beta == 1):
        dlatent_avg_beta = None
    if not is_training or (style_mixing_prob is not None and not tflib.is_tf_expression(style_mixing_prob) and style_mixing_prob <= 0):
        style_mixing_prob = None

    # Setup components.
    if 'synthesis' not in components:
        components.synthesis = tflib.Network('G_synthesis', func_name=globals()[synthesis_func], **kwargs)
    num_layers = components.synthesis.input_shape[1]
    dlatent_size = 1024  # components.synthesis.input_shape[2]
    # print('dlatent_size in gmain', dlatent_size)
    if 'mapping' not in components:
        components.mapping = tflib.Network('G_mapping', func_name=globals()[mapping_func], dlatent_broadcast=num_layers, **kwargs)

    # Setup variables.
    lod_in = tf.get_variable('lod', initializer=np.float32(0), trainable=False, use_resource=True)
    dlatent_avg = tf.get_variable('dlatent_avg', shape=[dlatent_size], initializer=tf.initializers.zeros(), trainable=False, use_resource=True)

    # Evaluate mapping network.
    dlatents = components.mapping.get_output_for(latents_in, labels_in, is_training=is_training, **kwargs)
    dlatents = tf.cast(dlatents, tf.float32)

    # Update moving average of W.
    if dlatent_avg_beta is not None:
        with tf.variable_scope('DlatentAvg'):
            batch_avg = tf.reduce_mean(dlatents[:, 0], axis=0)
            update_op = tf.assign(dlatent_avg, tflib.lerp(batch_avg, dlatent_avg, dlatent_avg_beta))
            with tf.control_dependencies([update_op]):
                dlatents = tf.identity(dlatents)

    # Perform style mixing regularization.
    if style_mixing_prob is not None:
        with tf.variable_scope('StyleMix'):
            latents2 = tf.random_normal(tf.shape(latents_in))
            dlatents2 = components.mapping.get_output_for(latents2, labels_in, is_training=is_training, **kwargs)
            dlatents2 = tf.cast(dlatents2, tf.float32)
            layer_idx = np.arange(num_layers)[np.newaxis, :, np.newaxis]
            cur_layers = num_layers - tf.cast(lod_in, tf.int32) * 2
            mixing_cutoff = tf.cond(
                tf.random_uniform([], 0.0, 1.0) < style_mixing_prob,
                lambda: tf.random_uniform([], 1, cur_layers, dtype=tf.int32),
                lambda: cur_layers)
            dlatents = tf.where(tf.broadcast_to(layer_idx < mixing_cutoff, tf.shape(dlatents)), dlatents, dlatents2)

    # Apply truncation trick.
    if truncation_psi is not None:
        with tf.variable_scope('Truncation'):
            layer_idx = np.arange(num_layers)[np.newaxis, :, np.newaxis]
            layer_psi = np.ones(layer_idx.shape, dtype=np.float32)
            if truncation_cutoff is None:
                layer_psi *= truncation_psi
            else:
                layer_psi = tf.where(layer_idx < truncation_cutoff, layer_psi * truncation_psi, layer_psi)
            dlatents = tflib.lerp(dlatent_avg, dlatents, layer_psi)

    # Evaluate synthesis network.
    deps = []
    if 'lod' in components.synthesis.vars:
        deps.append(tf.assign(components.synthesis.vars['lod'], lod_in))
    with tf.control_dependencies(deps):
        images_out = components.synthesis.get_output_for(dlatents, is_training=is_training, force_clean_graph=is_template_graph, **kwargs)

    # Return requested outputs.
    images_out = tf.identity(images_out, name='images_out')
    if return_dlatents:
        return images_out, dlatents
    return images_out

#----------------------------------------------------------------------------
# Mapping network.
# Transforms the input latent code (z) to the disentangled latent code (w).
# Used in configs B-F (Table 1).

def G_mapping(
    latents_in,                             # First input: Latent vectors (Z) [minibatch, latent_size].
    labels_in,                              # Second input: Conditioning labels [minibatch, label_size].
    latent_size             = 512,          # Latent vector (Z) dimensionality.
    label_size              = 0,            # Label dimensionality, 0 if no labels.
    dlatent_size            = 512,          # Disentangled latent (W) dimensionality.
    dlatent_broadcast       = None,         # Output disentangled latent (W) as [minibatch, dlatent_size] or [minibatch, dlatent_broadcast, dlatent_size].
    mapping_layers          = 8,            # Number of mapping layers.
    mapping_fmaps           = 512,          # Number of activations in the mapping layers.
    mapping_lrmul           = 0.01,         # Learning rate multiplier for the mapping layers.
    mapping_nonlinearity    = 'lrelu',      # Activation function: 'relu', 'lrelu', etc.
    normalize_latents       = True,         # Normalize latent vectors (Z) before feeding them to the mapping layers?
    dtype                   = 'float32',    # Data type to use for activations and outputs.
    **_kwargs):                             # Ignore unrecognized keyword args.

    # Hard-coded overrides used by this variant.
    normalize_latents = False
    latent_size = 1024
    dlatent_size = 1024
    mapping_fmaps = 1024
    mapping_layers = 4

    act = mapping_nonlinearity

    # Inputs.
    latents_in.set_shape([None, latent_size])
    labels_in.set_shape([None, label_size])
    latents_in = tf.cast(latents_in, dtype)
    labels_in = tf.cast(labels_in, dtype)
    x = latents_in

    # Embed labels and concatenate them with latents.
    if label_size:
        with tf.variable_scope('LabelConcat'):
            w = tf.get_variable('weight', shape=[label_size, latent_size], initializer=tf.initializers.random_normal(), use_resource=True)
            y = tf.matmul(labels_in, tf.cast(w, dtype))
            x = tf.concat([x, y], axis=1)

    # Normalize latents.
    if normalize_latents:
        with tf.variable_scope('Normalize'):
            x *= tf.rsqrt(tf.reduce_mean(tf.square(x), axis=1, keepdims=True) + 1e-8)

    # Mapping layers.
    for layer_idx in range(mapping_layers):
        with tf.variable_scope('Dense%d' % layer_idx):
            fmaps = dlatent_size if layer_idx == mapping_layers - 1 else mapping_fmaps
            x = apply_bias_act(dense_layer(x, fmaps=fmaps, lrmul=mapping_lrmul), act=act, lrmul=mapping_lrmul)

    # Broadcast.
    if dlatent_broadcast is not None:
        with tf.variable_scope('Broadcast'):
            x = tf.tile(x[:, np.newaxis], [1, dlatent_broadcast, 1])

    # Output.
    assert x.dtype == tf.as_dtype(dtype)
    return tf.identity(x, name='dlatents_out')

#----------------------------------------------------------------------------
# StyleGAN synthesis network with revised architecture (Figure 2d).
# Implements progressive growing, but no skip connections or residual nets (Figure 7).
# Used in configs B-D (Table 1).

#----------------------------------------------------------------------------
# StyleGAN2 synthesis network (Figure 7).
# Implements skip connections and residual nets (Figure 7), but no progressive growing.
# Used in configs E-F (Table 1).

def G_synthesis_stylegan2(
    dlatents_in,                        # Input: Disentangled latents (W) [minibatch, num_layers, dlatent_size].
    dlatent_size        = 512,          # Disentangled latent (W) dimensionality.
    num_channels        = 3,            # Number of output color channels.
    resolution          = 1024,         # Output resolution.
    fmap_base           = 16 << 10,     # Overall multiplier for the number of feature maps.
    fmap_decay          = 1.0,          # log2 feature map reduction when doubling the resolution.
    fmap_min            = 1,            # Minimum number of feature maps in any layer.
    fmap_max            = 512,          # Maximum number of feature maps in any layer.
    randomize_noise     = True,         # True = randomize noise inputs every time (non-deterministic), False = read noise inputs from variables.
    architecture        = 'skip',       # Architecture: 'orig', 'skip', 'resnet'.
    nonlinearity        = 'lrelu',      # Activation function: 'relu', 'lrelu', etc.
    dtype               = 'float32',    # Data type to use for activations and outputs.
    resample_kernel     = [1,3,3,1],    # Low-pass filter to apply when resampling activations. None = no filtering.
    fused_modconv       = True,         # Implement modulated_conv2d_layer() as a single fused op?
    **_kwargs):                         # Ignore unrecognized keyword args.

    # Hard-coded overrides used by this variant.
    fmap_base = 32 << 10
    fmap_max = 1024
    dlatent_size = 1024
    num_channels = int(os.environ["NUM_CHANNELS"]) if "NUM_CHANNELS" in os.environ else num_channels

    resolution_log2 = int(np.log2(resolution))
    assert resolution == 2**resolution_log2 and resolution >= 4
    def nf(stage): return np.clip(int(fmap_base / (2.0 ** (stage * fmap_decay))), fmap_min, fmap_max)
    assert architecture in ['orig', 'skip', 'resnet']
    act = nonlinearity
    num_layers = resolution_log2 * 2 - 2
    images_out = None

    # Primary inputs.
    dlatents_in.set_shape([None, num_layers, dlatent_size])
    dlatents_in = tf.cast(dlatents_in, dtype)

    # Noise inputs.
    noise_inputs = []
    for layer_idx in range(num_layers - 1):
        res = (layer_idx + 5) // 2
        shape = [1, 1, 2**res, 2**res]
        noise_inputs.append(tf.get_variable('noise%d' % layer_idx, shape=shape, initializer=tf.initializers.random_normal(), trainable=False, use_resource=True))

    # Single convolution layer with all the bells and whistles.
    def layer(x, layer_idx, fmaps, kernel, up=False):
        x = modulated_conv2d_layer(x, dlatents_in[:, layer_idx], fmaps=fmaps, kernel=kernel, up=up, resample_kernel=resample_kernel, fused_modconv=fused_modconv)
        if randomize_noise:
            noise = tf.random_normal([tf.shape(x)[0], 1, x.shape[2], x.shape[3]], dtype=x.dtype)
        else:
            noise = tf.cast(noise_inputs[layer_idx], x.dtype)
        noise_strength = tf.get_variable('noise_strength', shape=[], initializer=tf.initializers.zeros(), use_resource=True)
        x += noise * tf.cast(noise_strength, x.dtype)
        return apply_bias_act(x, act=act)

    # Building blocks for main layers.
    def block(x, res):  # res = 3..resolution_log2
        t = x
        with tf.variable_scope('Conv0_up'):
            x = layer(x, layer_idx=res*2-5, fmaps=nf(res-1), kernel=3, up=True)
        with tf.variable_scope('Conv1'):
            x = layer(x, layer_idx=res*2-4, fmaps=nf(res-1), kernel=3)
        if architecture == 'resnet':
            with tf.variable_scope('Skip'):
                t = conv2d_layer(t, fmaps=nf(res-1), kernel=1, up=True, resample_kernel=resample_kernel)
                x = (x + t) * (1 / np.sqrt(2))
        return x
    def upsample(y):
        with tf.variable_scope('Upsample'):
            return upsample_2d(y, k=resample_kernel)
    def torgb(x, y, res):  # res = 2..resolution_log2
        with tf.variable_scope('ToRGB'):
            t = apply_bias_act(modulated_conv2d_layer(x, dlatents_in[:, res*2-3], fmaps=num_channels, kernel=1, demodulate=False, fused_modconv=fused_modconv))
            return graph_images(t if y is None else y + t, res=2**res)

    # Early layers.
    y = None
    with tf.variable_scope('4x4'):
        with tf.variable_scope('Const'):
            x = tf.get_variable('const', shape=[1, nf(1), 4, 4], initializer=tf.initializers.random_normal(), use_resource=True)
            x = tf.tile(tf.cast(x, dtype), [tf.shape(dlatents_in)[0], 1, 1, 1])
        with tf.variable_scope('Conv'):
            x = layer(x, layer_idx=0, fmaps=nf(1), kernel=3)
        if architecture == 'skip':
            y = torgb(x, y, 2)

    # Main layers.
    for res in range(3, resolution_log2 + 1):
        with tf.variable_scope('%dx%d' % (2**res, 2**res)):
            x = block(x, res)
            if 2**res == 64 and False:  # Self-attention block disabled in this variant.
                print('Adding self-attention block to generator')
                x = non_local_block(x, "SelfAtten", use_sn=True)
            if architecture == 'skip':
                y = upsample(y)
            if architecture == 'skip' or res == resolution_log2:
                y = torgb(x, y, res)
    images_out = y

    assert images_out.dtype == tf.as_dtype(dtype)
    return tf.identity(images_out, name='images_out')

#----------------------------------------------------------------------------
# Original StyleGAN discriminator.
# Used in configs B-D (Table 1).

def D_stylegan(
    images_in,                          # First input: Images [minibatch, channel, height, width].
    labels_in,                          # Second input: Labels [minibatch, label_size].
    num_channels        = 3,            # Number of input color channels. Overridden based on dataset.
    resolution          = 1024,         # Input resolution. Overridden based on dataset.
    label_size          = 0,            # Dimensionality of the labels, 0 if no labels. Overridden based on dataset.
    fmap_base           = 16 << 10,     # Overall multiplier for the number of feature maps.
    fmap_decay          = 1.0,          # log2 feature map reduction when doubling the resolution.
    fmap_min            = 1,            # Minimum number of feature maps in any layer.
    fmap_max            = 512,          # Maximum number of feature maps in any layer.
    nonlinearity        = 'lrelu',      # Activation function: 'relu', 'lrelu', etc.
    mbstd_group_size    = 4,            # Group size for the minibatch standard deviation layer, 0 = disable.
    mbstd_num_features  = 1,            # Number of features for the minibatch standard deviation layer.
    dtype               = 'float32',    # Data type to use for activations and outputs.
    resample_kernel     = [1,3,3,1],    # Low-pass filter to apply when resampling activations. None = no filtering.
    structure           = 'auto',       # 'fixed' = no progressive growing, 'linear' = human-readable, 'recursive' = efficient, 'auto' = select automatically.
    is_template_graph   = False,        # True = template graph constructed by the Network class, False = actual evaluation.
    **_kwargs):                         # Ignore unrecognized keyword args.

    num_channels = int(os.environ["NUM_CHANNELS"]) if "NUM_CHANNELS" in os.environ else num_channels
    resolution_log2 = int(np.log2(resolution))
    assert resolution == 2**resolution_log2 and resolution >= 4
    def nf(stage): return np.clip(int(fmap_base / (2.0 ** (stage * fmap_decay))), fmap_min, fmap_max)
    if structure == 'auto': structure = 'linear' if is_template_graph else 'recursive'
    act = nonlinearity

    images_in.set_shape([None, num_channels, resolution, resolution])
    labels_in.set_shape([None, label_size])
    images_in = tf.cast(images_in, dtype)
    labels_in = tf.cast(labels_in, dtype)
    lod_in = tf.cast(tf.get_variable('lod', initializer=np.float32(0.0), trainable=False, use_resource=True), dtype)

    # Building blocks for spatial layers.
    def fromrgb(x, res):  # res = 2..resolution_log2
        with tf.variable_scope('FromRGB_lod%d' % (resolution_log2 - res)):
            return apply_bias_act(conv2d_layer(x, fmaps=nf(res-1), kernel=1), act=act)
    def block(x, res):  # res = 2..resolution_log2
        with tf.variable_scope('%dx%d' % (2**res, 2**res)):
            with tf.variable_scope('Conv0'):
                x = apply_bias_act(conv2d_layer(x, fmaps=nf(res-1), kernel=3), act=act)
            with tf.variable_scope('Conv1_down'):
                x = apply_bias_act(conv2d_layer(x, fmaps=nf(res-2), kernel=3, down=True, resample_kernel=resample_kernel), act=act)
            return x

    # Fixed structure: simple and efficient, but does not support progressive growing.
    if structure == 'fixed':
        x = fromrgb(images_in, resolution_log2)
        for res in range(resolution_log2, 2, -1):
            x = block(x, res)

    # Linear structure: simple but inefficient.
    if structure == 'linear':
        img = images_in
        x = fromrgb(img, resolution_log2)
        for res in range(resolution_log2, 2, -1):
            lod = resolution_log2 - res
            x = block(x, res)
            with tf.variable_scope('Downsample_lod%d' % lod):
                img = downsample_2d(img)
            y = fromrgb(img, res - 1)
            with tf.variable_scope('Grow_lod%d' % lod):
                x = tflib.lerp_clip(x, y, lod_in - lod)

    # Recursive structure: complex but efficient.
    if structure == 'recursive':
        def cset(cur_lambda, new_cond, new_lambda):
            return lambda: tf.cond(new_cond, new_lambda, cur_lambda)
        def grow(res, lod):
            x = lambda: fromrgb(naive_downsample_2d(images_in, factor=2**lod), res)
            if lod > 0: x = cset(x, (lod_in < lod), lambda: grow(res + 1, lod - 1))
            x = block(x(), res); y = lambda: x
            y = cset(y, (lod_in > lod), lambda: tflib.lerp(x, fromrgb(naive_downsample_2d(images_in, factor=2**(lod+1)), res - 1), lod_in - lod))
            return y()
        x = grow(3, resolution_log2 - 3)

    # Final layers at 4x4 resolution.
    with tf.variable_scope('4x4'):
        if mbstd_group_size > 1:
            with tf.variable_scope('MinibatchStddev'):
                x = minibatch_stddev_layer(x, mbstd_group_size, mbstd_num_features)
        with tf.variable_scope('Conv'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(1), kernel=3), act=act)
        with tf.variable_scope('Dense0'):
            x = apply_bias_act(dense_layer(x, fmaps=nf(0)), act=act)
        with tf.variable_scope('Output'):
            x = apply_bias_act(dense_layer(x, fmaps=1))
    scores_out = x

    # Output.
    assert scores_out.dtype == tf.as_dtype(dtype)
    scores_out = tf.identity(scores_out, name='scores_out')
    return scores_out
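#----------------------------------------------------------------------------
# Worked example (illustrative, not part of the original module): the
# feature-map schedule implied by nf() under the discriminator defaults above
# (fmap_base = 16 << 10 = 16384, fmap_decay = 1.0, fmap_min = 1, fmap_max = 512).

def _example_fmap_schedule(fmap_base=16 << 10, fmap_decay=1.0, fmap_min=1, fmap_max=512):
    # nf(stage) = clip(int(fmap_base / 2**(stage * fmap_decay)), fmap_min, fmap_max), so with
    # the defaults this returns {1: 512, 2: 512, 3: 512, 4: 512, 5: 512, 6: 256, 7: 128, 8: 64, 9: 32}.
    # A block operating at resolution 2**res therefore convolves with nf(res-1)
    # feature maps and downsamples to nf(res-2).
    def nf(stage): return np.clip(int(fmap_base / (2.0 ** (stage * fmap_decay))), fmap_min, fmap_max)
    return {stage: int(nf(stage)) for stage in range(1, 10)}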
#----------------------------------------------------------------------------
# StyleGAN2 discriminator (Figure 7).
# Implements skip connections and residual nets (Figure 7), but no progressive growing.
# Used in configs E-F (Table 1).

def D_stylegan2(
    images_in,                          # First input: Images [minibatch, channel, height, width].
    labels_in,                          # Second input: Labels [minibatch, label_size].
    num_channels        = 3,            # Number of input color channels. Overridden based on dataset.
    resolution          = 1024,         # Input resolution. Overridden based on dataset.
    label_size          = 0,            # Dimensionality of the labels, 0 if no labels. Overridden based on dataset.
    fmap_base           = 16 << 10,     # Overall multiplier for the number of feature maps.
    fmap_decay          = 1.0,          # log2 feature map reduction when doubling the resolution.
    fmap_min            = 1,            # Minimum number of feature maps in any layer.
    fmap_max            = 512,          # Maximum number of feature maps in any layer.
    architecture        = 'resnet',     # Architecture: 'orig', 'skip', 'resnet'.
    nonlinearity        = 'lrelu',      # Activation function: 'relu', 'lrelu', etc.
    mbstd_group_size    = 4,            # Group size for the minibatch standard deviation layer, 0 = disable.
    mbstd_num_features  = 1,            # Number of features for the minibatch standard deviation layer.
    dtype               = 'float32',    # Data type to use for activations and outputs.
    resample_kernel     = [1,3,3,1],    # Low-pass filter to apply when resampling activations. None = no filtering.
    **_kwargs):                         # Ignore unrecognized keyword args.

    # Hard-coded overrides used by this variant.
    mbstd_group_size = 32
    mbstd_num_features = 4
    num_channels = int(os.environ["NUM_CHANNELS"]) if "NUM_CHANNELS" in os.environ else num_channels

    resolution_log2 = int(np.log2(resolution))
    assert resolution == 2**resolution_log2 and resolution >= 4
    def nf(stage): return np.clip(int(fmap_base / (2.0 ** (stage * fmap_decay))), fmap_min, fmap_max)
    assert architecture in ['orig', 'skip', 'resnet']
    act = nonlinearity

    images_in.set_shape([None, num_channels, resolution, resolution])
    labels_in.set_shape([None, label_size])
    images_in = tf.cast(images_in, dtype)
    labels_in = tf.cast(labels_in, dtype)

    # Building blocks for main layers.
    def fromrgb(x, y, res):  # res = 2..resolution_log2
        with tf.variable_scope('FromRGB'):
            t = apply_bias_act(conv2d_layer(y, fmaps=nf(res-1), kernel=1), act=act)
            return t if x is None else x + t
    def block(x, res):  # res = 2..resolution_log2
        t = x
        with tf.variable_scope('Conv0'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(res-1), kernel=3), act=act)
        with tf.variable_scope('Conv1_down'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(res-2), kernel=3, down=True, resample_kernel=resample_kernel), act=act)
        if architecture == 'resnet':
            with tf.variable_scope('Skip'):
                t = conv2d_layer(t, fmaps=nf(res-2), kernel=1, down=True, resample_kernel=resample_kernel)
                x = (x + t) * (1 / np.sqrt(2))
        return x
    def downsample(y):
        with tf.variable_scope('Downsample'):
            return downsample_2d(y, k=resample_kernel)

    # Main layers.
    x = None
    y = images_in
    for res in range(resolution_log2, 2, -1):
        with tf.variable_scope('%dx%d' % (2**res, 2**res)):
            if architecture == 'skip' or res == resolution_log2:
                x = fromrgb(x, y, res)
            if 2**res == 64 and False:  # Self-attention block disabled in this variant.
                print('Adding self-attention block to discriminator')
                x = non_local_block(x, "SelfAtten", use_sn=True)
            x = block(x, res)
            if architecture == 'skip':
                y = downsample(y)

    # Final layers.
    with tf.variable_scope('4x4'):
        if architecture == 'skip':
            x = fromrgb(x, y, 2)
        if mbstd_group_size > 1:
            with tf.variable_scope('MinibatchStddev'):
                x = minibatch_stddev_layer(x, mbstd_group_size, mbstd_num_features)
        with tf.variable_scope('Conv'):
            x = apply_bias_act(conv2d_layer(x, fmaps=nf(1), kernel=3), act=act)
        with tf.variable_scope('Dense0'):
            x = apply_bias_act(dense_layer(x, fmaps=nf(0)), act=act)
        with tf.variable_scope('Output'):
            x = apply_bias_act(dense_layer(x, fmaps=1))
    scores_out = x

    # Output.
    assert scores_out.dtype == tf.as_dtype(dtype)
    scores_out = tf.identity(scores_out, name='scores_out')
    return scores_out

#----------------------------------------------------------------------------

NORMAL_INIT = "normal"
TRUNCATED_INIT = "truncated"
ORTHOGONAL_INIT = "orthogonal"
INITIALIZERS = [NORMAL_INIT, TRUNCATED_INIT, ORTHOGONAL_INIT]

#@gin.configurable("weights")
def weight_initializer(initializer=NORMAL_INIT, stddev=0.02):
    """Returns the initializer for the given name.

    Args:
        initializer: Name of the initializer. Use one in INITIALIZERS.
        stddev: Standard deviation passed to initializer.

    Returns:
        Initializer from `tf.initializers`.
    """
    if initializer == NORMAL_INIT:
        return tf.initializers.random_normal(stddev=stddev)
    if initializer == TRUNCATED_INIT:
        return tf.initializers.truncated_normal(stddev=stddev)
    if initializer == ORTHOGONAL_INIT:
        return tf.initializers.orthogonal()
    raise ValueError("Unknown weight initializer {}.".format(initializer))

#@gin.configurable(blacklist=["inputs"])
def spectral_norm(inputs, epsilon=1e-12, singular_value="left", return_normalized=True, power_iteration_rounds=1):
    """Performs Spectral Normalization on a weight tensor.

    Details of why this is helpful for GANs can be found in "Spectral
    Normalization for Generative Adversarial Networks", Miyato T. et al., 2018.
    [https://arxiv.org/abs/1802.05957].

    Args:
        inputs: The weight tensor to normalize.
        epsilon: Epsilon for L2 normalization.
        singular_value: Which first singular value to store (left or right).
            Use "auto" to automatically choose the one that has fewer dimensions.
        return_normalized: If True, return the normalized weight tensor;
            otherwise return the flattened weight together with its norm estimate.
        power_iteration_rounds: Number of power-iteration rounds per call.

    Returns:
        The normalized weight tensor.
    """
    if len(inputs.shape) < 2:
        raise ValueError("Spectral norm can only be applied to multi-dimensional tensors")

    # The paper says to flatten convnet kernel weights from (C_out, C_in, KH, KW)
    # to (C_out, C_in * KH * KW). Our Conv2D kernel shape is (KH, KW, C_in, C_out)
    # so it should be reshaped to (KH * KW * C_in, C_out), and similarly for other
    # layers that put output channels as last dimension. This implies that w
    # here is equivalent to w.T in the paper.
    w = tf.reshape(inputs, (-1, inputs.shape[-1]))

    # Choose whether to persist the first left or first right singular vector.
    # As the underlying matrix is PSD, this should be equivalent, but in practice
    # the shape of the persisted vector is different. Here one can choose whether
    # to maintain the left or right one, or pick the one which has the smaller
    # dimension. We use the same variable for the singular vector if we switch
    # from normal weights to EMA weights.
    var_name = inputs.name.replace("/ExponentialMovingAverage", "").split("/")[-1]
    var_name = var_name.split(":")[0] + "/u_var"
    if singular_value == "auto":
        singular_value = "left" if w.shape[0] <= w.shape[1] else "right"
    u_shape = (w.shape[0], 1) if singular_value == "left" else (1, w.shape[-1])
    u_var = tf.get_variable(
        var_name,
        shape=u_shape,
        dtype=w.dtype,
        initializer=tf.random_normal_initializer(),
        collections=[tf.GraphKeys.LOCAL_VARIABLES],
        trainable=False,
        use_resource=True)
    u = u_var

    # Use power iteration method to approximate the spectral norm.
    # The authors suggest that one round of power iteration was sufficient in the
    # actual experiment to achieve satisfactory performance.
    for _ in range(power_iteration_rounds):
        if singular_value == "left":
            # `v` approximates the first right singular vector of matrix `w`.
            v = tf.math.l2_normalize(tf.matmul(tf.transpose(w), u), axis=None, epsilon=epsilon)
            u = tf.math.l2_normalize(tf.matmul(w, v), axis=None, epsilon=epsilon)
        else:
            v = tf.math.l2_normalize(tf.matmul(u, w, transpose_b=True), epsilon=epsilon)
            u = tf.math.l2_normalize(tf.matmul(v, w), epsilon=epsilon)

    # Update the approximation.
    with tf.control_dependencies([tf.assign(u_var, u, name="update_u")]):
        u = tf.identity(u)

    # The authors of SN-GAN chose to stop gradient propagating through u and v
    # and we maintain that option.
    u = tf.stop_gradient(u)
    v = tf.stop_gradient(v)

    if singular_value == "left":
        norm_value = tf.matmul(tf.matmul(tf.transpose(u), w), v)
    else:
        norm_value = tf.matmul(tf.matmul(v, w), u, transpose_b=True)
    norm_value.shape.assert_is_fully_defined()
    norm_value.shape.assert_is_compatible_with([1, 1])

    if return_normalized:
        w_normalized = w / norm_value

        # Deflate normalized weights to match the unnormalized tensor.
        w_tensor_normalized = tf.reshape(w_normalized, inputs.shape)
        return w_tensor_normalized
    else:
        return w, norm_value

def graph_name(name):
    name = name.split(':')[0]
    name = name.split('/strided_slice_')[0]
    name = name.split('/Identity_')[0]
    if name.startswith('D_loss/G/G_synthesis/'):
        name = name.replace('D_loss/G/G_synthesis/', '')
        return 'G_' + name
    elif name.startswith('D_loss/D/'):
        name = name.replace('D_loss/D/', '')
        return 'D_' + name

def graph_spectral_norm(w):
    w1, norm = spectral_norm(w, return_normalized=False)
    value = norm[0][0]
    name = graph_name(value.name)
    if name is not None:
        autosummary('specnorm_' + name, value)
    else:
        tf.logging.info('ignoring autosummary(%s, %s)', repr(value.name), repr(value))
    if 'USE_SPECNORM' in os.environ:
        tf.logging.info('Using spectral normalization for %s', repr(w))
        w_normalized = w1 / norm
        w_normalized = tf.reshape(w_normalized, w.shape)
        return w_normalized
    return w

def graph_images(images, res):
    value = tf.identity(images)
    name = graph_name(value.name)
    # if name is not None:
    #     autoimages(name, value, res=res)
    # else:
    #     tf.logging.info('ignoring autoimages(%s, %s)', repr(name), repr(value))
    return images

def conv2d(inputs, output_dim, k_h, k_w, d_h, d_w, stddev=0.02, name="conv2d", use_sn=False, use_bias=True):
    """Performs 2D convolution of the input."""
    with tf.variable_scope(name):
        w = tf.get_variable(
            "kernel", [k_h, k_w, inputs.shape[-1].value, output_dim],
            initializer=weight_initializer(stddev=stddev), use_resource=True)
        if use_sn:
            w = spectral_norm(w)
        outputs = _o(tf.nn.conv2d(_i(inputs), w, strides=[1, d_h, d_w, 1], padding="SAME", data_format="NHWC"))
        if use_bias:
            bias = tf.get_variable(
                "bias", [output_dim], initializer=tf.constant_initializer(0.0), use_resource=True)
            outputs += bias
        return outputs

conv1x1 = functools.partial(conv2d, k_h=1, k_w=1, d_h=1, d_w=1)
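#----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): applying
# spectral_norm() directly to a dense weight, analogous to what conv2d() does
# with use_sn=True. The variable scope, shapes, and output_dim are placeholders.

def _example_spectral_norm_dense(x, output_dim=256):
    # x: [N, in_dim] activations. The persisted power-iteration vector lives in a
    # LOCAL_VARIABLES collection variable named '<weight>/u_var' (see spectral_norm).
    with tf.variable_scope('ExampleSNDense'):
        w = tf.get_variable('kernel', [x.shape[-1].value, output_dim],
                            initializer=weight_initializer(stddev=0.02), use_resource=True)
        w = spectral_norm(w)  # divide by the estimated largest singular value
        return tf.matmul(x, w)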
def non_local_block(x, name, use_sn):
    """Self-attention (non-local) block.

    This method is used to exactly reproduce SAGAN and ignores Gin settings on
    weight initialization and spectral normalization.

    Args:
        x: Input tensor of shape [batch, h, w, c].
        name: Name of the variable scope.
        use_sn: Apply spectral norm to the weights.

    Returns:
        A tensor of the same shape after self-attention was applied.
    """
    def _spatial_flatten(inputs):
        shape = inputs.shape
        return tf.reshape(inputs, (-1, shape[1] * shape[2], shape[3]))

    with tf.variable_scope(name):
        h, w, num_channels = x.get_shape().as_list()[1:]
        num_channels_attn = num_channels // 8
        num_channels_g = num_channels // 2

        # Theta path
        theta = conv1x1(x, num_channels_attn, name="conv2d_theta", use_sn=use_sn, use_bias=False)
        theta = _spatial_flatten(theta)

        # Phi path
        phi = conv1x1(x, num_channels_attn, name="conv2d_phi", use_sn=use_sn, use_bias=False)
        phi = tf.layers.max_pooling2d(inputs=phi, pool_size=[2, 2], strides=2)
        phi = _spatial_flatten(phi)

        attn = tf.matmul(theta, phi, transpose_b=True)
        attn = tf.nn.softmax(attn)

        # G path
        g = conv1x1(x, num_channels_g, name="conv2d_g", use_sn=use_sn, use_bias=False)
        g = tf.layers.max_pooling2d(inputs=g, pool_size=[2, 2], strides=2)
        g = _spatial_flatten(g)

        attn_g = tf.matmul(attn, g)
        attn_g = tf.reshape(attn_g, [-1, h, w, num_channels_g])
        sigma = tf.get_variable("sigma", [], initializer=tf.zeros_initializer(), use_resource=True)
        attn_g = conv1x1(attn_g, num_channels, name="conv2d_attn_g", use_sn=use_sn, use_bias=False)
        return x + sigma * attn_g
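#----------------------------------------------------------------------------
# Illustrative construction sketch (not part of the original module): how these
# build functions are typically wrapped in dnnlib.tflib.Network objects, in the
# same way G_main() instantiates its sub-networks above. The network names,
# resolution, and label_size values are placeholders for illustration.

def _example_build_networks():
    G = tflib.Network('G', func_name=G_main, num_channels=3, resolution=1024, label_size=0)
    D = tflib.Network('D', func_name=D_stylegan2, num_channels=3, resolution=1024, label_size=0)
    return G, D

#----------------------------------------------------------------------------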
Pʞ=Q!|Mb>?>?᭽"?׃Y=>>+>9=Tοc̾1R+?g]J>H>'*>20׾(0=v Ī S ?j>mqiQ?#øf?F =4-4>`hji"v?O{@\2ľ:?Q+ee?uU>o>YI bܿt>s@wz,{?H>vH?.&+&h>ey@;?u>L?0nT>D] ?ğAA+?~ >AS`?Ͼ S=|W^?TbbI@1?@&j?O6?`^? #=@ml󋠽)?By6?St6>x>' 5.ͿƁT?}}ȑ# ;2=vBFsߑ=䶿 80⽶?qQ=(?- @?ڍf?r.m${?Wվ??J??V>3o?<ԥK>5_7>85>> i2?{'J?O=ON,`E@aҠ=S؝?>9?q=`ء?U>?1=TUޑ>#?PI?:?愃?Hg$?pԾ>!ͮ? >V U>/*H?T6?pn=~?C>:e?j???x z>kQ>U?؁qo(>X?m? J?5x?2?΀S?>~2ob?黽??>Ɣ?񾺌?q6?ׂNNھx Yɀ?ϲ;턽=y>ĉ@j??m?cr%|?o>@-?ڛ&~?Όg>rmJs>j">iY4V(I( gࢿIӾQؾI|>ȝɾBv?D>bE?| ?y?+D#? <_>r#?Pem%J*L>z8f?V?8>dQRC7=9f-^G?S>5P??L^GȾZ?c¾>c>]{~>t?q?A^.-ٮ?~sNN?=CK>AN?>$S)@19?:\Y&c?@d?  ?E?п[5>)=={ɛ7P =??E; ?r8Hz??&=XZ'uJ=@>v?ݗˬd?$?dK@??C>R?.p,>x>[ (9@?U)V?ٟ?n?6!+*1?q8-?22<οW~e3?xÿf>-__c>Y}c?1 >а ,??,ɗ)?ƥ>e]f=>_?1@r?^.?`=t>!>i=y{%7nRc>X>!#S?)4ޔmk?JG>?빗 K?ؿ5]:'gOL?4?uI>I>?监>10V?3|?* ſ]2qχ?c6>0:dgyl>{2SR> +>??>a=j?~()ܾ0=:v?p/?U?J??}NA?.>?4 b}?L?>/@^ס?w[?p?~?ZgHnG?^>{e<`?!+?e'>>N>-پ>?|j{?h?̟|q}?8>V?>\N>T>~@襾o@9K@:'?L?X.^ LFxK!^>%>[>ye?b>,'K>@EnPB 9C>쫿U:>+? ?8e!l?~>|v h0?:r?@UK1?f ?a=L?.eB?o?x> v?G>_+H` wt1uÓ?LxaI ?>MJ2>þ]IQw䟿 S ?t>B'>>K?=*S?8>>p cھs};/?Yd<[ D?L?b?_COɿ`]P% ?P2?뾾>?)?b?@wn>.˿r >ҿ=`csTw?)nesοDQ>,}Wm{B=W^Igfƽ?fA>5=$? .??nXߝ?JVs*>7=8@z߾LiA?O??[?{5T? Un>"?u#?>sF >=??Yz2 >26?>"?V|MP?F>@׿f?? Ϳ¿Ȝ?x>=]6t փ?a U@J?bI?>qD?(t?,FItXPN}?XU>ϴݿ3?5=п ?&e??x\:?%>?W?yV?ճ㴗ӹ>Q?˾ %'?ѿ>bW9_{ `?ڮ=r???F>wf+ y>w=!=>U?#@ 47@$j?ո?.Hk?rZu>=V?.>@#:?v>JD>'w\a=(Tb?(?HV?)Ν_c/eʌE=[hJ/'j>]??>q6?7?1^ 1"?A?TCj5?⾖>!?F[!Q;p>;"< @`#=? > ˽k#پ1e0?PE?i7m6? ?.?S-?@>Rr>etrzOC>F?K#?\? =WsSľ@#?3Ῠû?m?;ՠ??2d?X>b7> ?k?N7>l=!t?SٽM>YC> 6DyؿFzwO;A2`>4b?\Rپ@8P=?9,eyX?2A>FقV^;?ݿ~-?l?=/?7?sۋ>>bG?+>?VJ6?1k(ƾ7ſKwT>rB'?I0&IO>PO7 >3 ?>>95k?`h?즢?S1s>q>|@JT>W? چ;@Pm?D?8 bAːr>y?>Aa߾DFˋ?9BI6ZW* ߰ǿsY?U>U\13(>1l??(em3> />L(>Y<~D~þ+>J>>b!???J>#=|]Zp?k0Գ???늾M2??#?R]?I,AV(ٕ>A>1B?&k?a?7?=+,/L?_`h?G.4?B3?a?y9>>>y= X?OȽ`@ >Ϸi?'fF?&@\g?wS?{ɗ>޻ @eP(Xa>H?dņ-b=?: l=̓}X2?m}əX>=>؎!? l7И?s(?G7=ܥ>x>=sG<y>>pAM ? H>~-??KtJ¾PȾ%7?#?}1>HQ%q?npm?g?s?k[? BO? >?)Rf?x> ;N?`>jU>wQ>˦;ots竿?(b?]cV\F=l>>YS} #>X>\??b+nW>Y?/ǾC,?l86?M?s|U\a'ܛX"-`a:m?B?E#!?W?}'ޖL?Aj? (h?u5?L><>j?Nʗ0>@;&=D^>cyozb@g %6G¿@νӿ'>t|@Mf=eǽ? qs?f?%pCy?B>>2&b?7?x6f>m?ao-̹?ֽ?lڻwɾ>V??)>)+4d>3?徟~T?E=@?췽OǪ= K?BM?2E?>\ƿ\n D?>]'?e>ƿ<{>"!k>K?nPs*(ipyY˄; a8>N?ȽW9M>Ĕ>Xa>֚G oV==a_Zf`?eˡ?!@%>M @Xhd0Q? > r@^ZI>[҆D ?W?g?G?=q>Lc???p&[>%+'> >ྸ?~J}??H=?} > ~? Ŀ ?&???2C]?;[?- _{sֆ?@>v/>>5IM?.>!rL?쎾b?2>>c?zw>?^0?x??i>/뉿I?|? 657? Ԏw=9b?ݖ>P꾾&?tp8coZ>E;!P?cPy9<xݙ?>a?wj? >?P?w+7?a?nM?5@ SNm?![= ?]f ?X ?5~립8\>,?^?<@4?Eܿ s#z4?N?= ?¬Z?_@׏Ƚtɾ?{‘yO*> C& ?/p?\>Ț?ῑlB>,W?r<f#?3@!XL?RI0D %>/〿=P>.c/9?|=ʽmA2>L?ԿOh͖=d(??fd;=V?3^>> ?bU+@>?9KceV ?Á>8'p>!?1>?3?it<|mٽ?.>ϯA?D$U<1fG?U?x L??܁>O{>?s [??̼-2FI>D~qU?P @=&5?(?Z<(ֿZ?n:+D=?;n? ж?dKN?@;ż1פ|Ab =*Z?7&?MIϾ=?d?u?"Pqa??s?K?/ǭeO>qI>(SRAٿY>Ҿ$.@Џ?ux\??hȂ? GOT?=?VN>^]>Ũ?<>'>;?Ϩ?ȣ.=K=ִ|p? @RL?Q[g?١0M0#L]>:8ih|G޿?cz??=gK3=?tȰ+ ?ѿ$r=>ؾS?Ar?V:'[?U]Ŀ㜰gS?ٿP?0a)>7N>?d?ʽ1.3|?+>fx˾TBrN\? e˧?J?J=z*?>yM5w9KYXh>&>M> (?Ir?y ?"]P??þs"?FPxtO?>)fG˿Կ%>=;?*T8x+֎d/?Q;o˟? +>*进#:f>Z>i_$>.j?>'>-xſY`b@政>ʱ4b*Pv3?ZY9>쵇>Ә? ?(|>ı><&?]O>C?Ӿ۾kلRG?ҿt!>}4??{. (?"?ɜ?Ъ۪szF_?!¿K`D)?f?j?j>.?>/ ?t??Ի?/`>>?+)>ԟ?"@i Ze?@@??4?`?q9>Wqm??VսLH>>&&??ZC+2i?Ô~ ?]w >F?߾9迭8WaV?R?[6?cɁŞ?Ή<4D>@ B1s?$?v,f = XS6?>Xٓ\?j2޼>C9?>$ B.H?o ?۽ A~_Z>++=:>??: Vg ?i;?ح56cIo*>^:?}d?Sݾd+ﶼ]k=@P=a?z4w zS?^z>9>N}?ſ[ʾ'>w]? W!?Wa>i?噣>bt>w?7VAD@^þG&? ?->;h1x>.۷ͭ?_QN1f-?Jz}ܾ"&??MV>(^?2W2? ?/t>? *@u1?N?a^6?*d? ? ¿~=2y=i|?qd?^.Z>?P_#i=-$gDž&@#\i> Nl> j]>lrN>BrWֿ#]&?K}t=9e1>%?rՁ?>*,,q=N4> Z=J'yտY&@7$xw=-Sh?,\п\$>+x??-H?'-Xn,?a? 䇾|?y?$k?>.{= 0>H>ӵ= ;6S@|ƿRW??0(D?L?QXe??(l()U=,?2k?8??µV<Ņ?ieyk>AcȾ6f?f"O\>Kx"ŝ\О>Y) (3Jͽm1Ld=?I>C½x=WU?Q]>Sd>8%?Ф)?7x?.? 
m(v=Sе?@M?o\??7w?a?l޶YɾKHnр?^{ c@s}2S6> '?|8?Z)?@˿?Ҝ>E9= [B?>v P=м5?N4á?? -W?-Ⱥgs>{>4pc?w%·2>L ?[ڈpȻy?裺>Lꆿ F?/ԯ =TXŊ?s!|(*@P?J:] 8!@=>"&?ƶ>E$?B| 8\*??n񿙾?21>`M?ئ*?(9=2 ra>IxeL\z?z, c^>V?ʓYn; t1%@]Y?ܾ{NFD)>>>uӠ?>>ǿ>̘?'?/?K;:t y>i@}N>i=@䃑? ?V Fo?$\@>%~ؿ'h ܁:1ÿkX2~\=C?sYrrÚ>8>K=CX5@?zu\?iÿ6?J[ss>>'>OA?K;8,>QCc?ŵYC>ݍ>`!?u8Ub<< v*?M?(?>{b?So>W?^[>?`? ?e Q>f=Js3+?6ы?CS?QE?4s H>} >`{+@?z>.)?ٟ=>̽筥>Tɽ{d>R?I"ᄆ?RD@y5%?j> ??/d>G`>=𾯄`>%~>)5?N ?^ ׿5¾>K? X?XI{ >$v?z+c<G?6>@Gg<>D*>24F' ?U{2Lv@=Qgƿy~흈?7?,K]?㔿K־ch?l8?ڥ=N־Y?(_>þ>\o|?,dվE+(/>m2ܽ(hH>@v1?' s ?R?Z?>&@ֿ<$ ?hݾ"?>5*@p+GV~ak?>>=w"?|UE?8=nI=lľ[/-K'?~?wXy>>ר/>}ȾL$@5 }Զ>?>P >Y޿%?0(>uVp?*v侌m׾+c,/:?0%=wGxړS>=3 [?w ?=~:Cת?oտDf;Q 傿۸>ھ^Ҿy/;6>$^->װ??C? > []=^w0q?+ >oFGԾ)>z Ӿ>V_>?RX>EſiA~fվ%Aܿ,[Eپ,ҙ?=?^K>>C03ҝ?&!j =U?Ɖ3l?.-ڭ&>Y:J@w>V?권?_?>8?ߘ>tw>D ?zA?ؽ6?˿%B>ݣ>Wz>% @Ǿ^>#bT]A yg=I?鱩IT٦><]Jo7g0^}[6 I?p]Ƽ@/Do6(f?EM>7?mW@Jy?YƐ>U>bYn?~ɾm?iQ^#Ϫ#\ ?aL=J>>\T\?&zNH>=N޿>>OkM!O^do*|b=/ =B}]~¼)@`v?/?OI>?jQ<2?q&[?ؾ[sUXl?xu&o_N׿&!?o?=?4;^V?o3|?Y#SS;Ĩ3*?Ue?m sM?&8>@cΊAM\%?>T lؿH=D+x?Ⱦ&<(Ciٽ+T?S>.}<5Ny?B?Cd8g }?^:, u,?cɿ꾿7H oK?Or6Ʀ>?4R>HQ[fw>(@0c޿>l>S<ǾY?/cgM~Nt F?uW̿ C>>_}>ܿ߁ @m>a>2Jj gԿB>=ͪ?N i:?R>=>\H==?䕟??9g޾[+ ̄>?>3?Q>˒l?!>&0E @Tya1?Ȓy?j?F=Ȣp(?o_?xa >307N?uP6 ?>%?>e0Cx?1hq?>&W龓>S5?{MX?eb̸>%?> !ѿ?J ʿV?[r͟k>?OW?#.*(쏾J>< @3++Ԭ>D*W>1-y:1>x?-;? _e~>N-> ?dhj9ݾPȿQyܾM]?>!}?MO>?O;>)%?i>?R? =tB?>};@U/?"߿NeȻ>0u?<̿ >g?#G`-*#>ߠO=pC?V"3>>?y:? q0C=r!**O=P?r>X=qGy?rӾTp?+=?"?c:>9:,$>?^?>7|?f`[J̽=_>(fa9_m?UM.>?YQڿc7HD?>*??m]ġ?[J?ġ=.??{p{> n?Ӱ?п1 )><_>C?1?p?;X +=>VоKc>J@ M9-)?6>>0$-)t6?m?G? EnJj3??Ә??5?/J??̾澗Ja;-Կ>4>K/?ÿ6m?^%@A?μV!?!B>cf?e?CV?2߈Mۢ{?҉?ݯL>{8?쾮>B?~3M?)5wDv>=#3?@ @9?Սщq?L꾭|X2??ʵ+>оPG6?AgUd>D?t?no?IDe? ?"M:)!A%ު(?Z2E?Ip]>>[40?VB>A־歾vh>-NA?W>>ۥgYVjG&/u ??}>i?X?$_\?}v{2c@$+IZ1Oݿھ+?>>fؿnY;?#`H>>*7><:?|q>@q>NJ< n?>.TdTAr ˚zo?>?iV>T?$?~w?MN ?&i<(Vt>j>p?;(wbx3?@́?5߿\'s?.?(cF?qhD=9?3 *>[j?J6e?=CPIsEް>]R8W>IiR)?Bl>D<>HLg>5O?P$Xu?M(\+T>us-wjE?/S?Iاx!!?? ?o*?V?޾>Rb>v?P?Zr?v*#@#Iv>8?n>ȅ?>߮?'T+<u?ڃ{䩾R f?;a?h? ?]?9恾!zH>)H'?Fǽ7>&<;zǿ+?6?fM7Zq?L=\ ?mM>m>ƪB? ܖ?1Y?g Ly=;ݾ UvP?Ze?'ڪ?57Fw;>Mw}. @m^?P @Aiqoh7%?kP*5Q>K?G":#\a# ucɐ->:Řd@꽲>)?þ֏z@by@F:ႿRMQ>r???¼Au>5gֿbbM>`?QbH>̪?(?>?п>%>C??ueM3>\>]Ҿ5A?}إsp>=妐K$?=GXؿZR?>V>X޾P$?˟>5g?/Ӿ>r?E?4?=.+}[ɾ_B,l??4?:ؿ>=%? ?WSe>J?+ih?9>">>gV?c&994] ^/?j?˿xV ѩ =B}?~`"i>0=3z?7ř#GzϾ?{,+ @v?:g?; ?Z/;i> PE>4lG,<#k+xƴ=zM=>mV!>ЈT>5?JS& T>F <}?5=>|M=D?:]?̓'䷎?֓?U>d>X03>3?N=8??هaJ?s¿a^p߿ZLLEg H]N>/?mo?ҵ?*È!ъ<٬?,2?^2?8(?!eH> ?Ãs>ؿ/H|>]=*=dr*6??(@`?4@ҹ?zݣ>.w=q|>aZXYa?Y 㾘t>׾:5"n N1|>?F3f?6fq̴Lt?l?mG@u?D = M0?*e?X>WG?]?1e~?8Ծ6kzT?|sC?zV`۽͡}>')@9J#Lb9!^F?~tg?w>L΋?˾x>f\j='<~K?zKf\>?6п>\#_>P?K.? >Rp?ֲe? BP\tu>uZM?#?m|~ $B?p>?E>?  ?S;?!HӚ T?'>?潿O?׳žɿP?rP>h&X?r?vqL6־ޖ?r&%ռH>j?Ԩ?x?-:???ؾF2%5>~Oɾ r3>{*? ٚ?*1H?Lm=MDIz?쇿rGؿyO? >`El>>(Q?[6=I-ʿY^>ɿ8b>X/?yx>v~eN\^?PLj I?gE<SѾ{{վ,?)ɼ4?U?3Y?ZG>\x0OM"a??JIy>`(i~@`?UD>Hv˾'?S<4{=Z2T>??7~>YQ=.¿>)?RU=7>r?Q.?;G ?t?>#i?ȕY?2?؅?45mo?P3n@?p>y%_F>=>k,? ?N?/E>žz&`jN@?@P?v]?hҿ[Ej?Q)@FOt%Hj?C? ){?(2>T>y>mi>^:*Nqą?aX5 ?d2=?)+?fI>}?$?$N5<8?u" C ?a?JލJ,>)?>?[5EV>Gu`ziIͿl^@ȾS?*VO<Ʈ>Wp"bi>_8"?įVῘ-MlC={,?H#k?y=H"Y">0k=Ij?0Ǭ? m?3 \뇔 -??."=>c?q*gKW?i:ƿ|i2O1chx$?j? ?œ\g=v?^&1>ȅ>2>Úr,b#p[>p'z>?l>?{V1> <ݰ?ݘwL\?=?;E > ?e,S=5l>N\? <Ϯ$>3ī1$v?zJ)o?kF? c??|n ?O^>1iл V?Sug?8@zN?{Ok>H?Ȳij? ??0?)M>QӾsH@??쌾W|⾖B?I׿+_?b&?BIӽrb?p?A?A}?Dj?<}?=>>k8'|g;??+ܾ_@k>N?1[>a_? 
&L@vs>niͿ0?u;!?,?]q?X"꿔?~󝳿꾒>>" >n?Ā?U=fo%?o˾N ?ϲ:)>{?@?>=NѲS>t?=ۿ{c>02?i?58C?????k}ᕽLw=?F> [?㠠7H??g2i?\ԩ> -H+@$Uh>Z?/ _>ۈ8yRB?҆e?k?"?O[{Ҕ?¿'5y?m?'ad??OgD>]2p>6/Ryپ:[ ?.J4M۰?:`jV)(Uy-ើ₾?+c?g ?GX?Rr]>F,񾒀??o:Cпm#?@%*?ӽhB[n?k?ٶɊ!j?w?-Z6H hڇox?q=7>9?½@VQ=֙->N-?ߌ?st>.s?^р??׾|GB>Z?1-J?筿Ȍ ^J>c`?M{?dʾ]?= &?:?s X}".m4?t?M??|K)?/{(UwO? =?0E{C?)>,!"4W)?|@)ȿe.>g?oS>^q>[]?Cx־Pr=p>Eѽt?e#1?2҅??mVî$?֘>?8?ԼP;܏Stg@Eu?Q"@/?C=Hy??쿽UʽK~̾><>~>ۿ?!>2> F?L?oɗ uӨ?࿤>?50;>1 Rl?C3xf.?'J.>K?nE)=W>aB?ǫ=澸]X?eR,?2>#dC{_Ӿw?N)<$ҪT3?Db:>Ao>yv|px:?qu͛]? ?-uB?!-U}A=@?Dp>Ķ?(`w\񒾖?ц TD=I?@]=p>>h sbFkcd>ŬJRV/Ͻ٪ͯ?ED?߀|?0?b?HTM#bf 1[HJ:p>-I+>G>wu 2{?پ,>U6Q&>5TO50G]zh#?6L.BVw>@^󀿊ˏ?zxAm7?L?DE??>5^!=@?f߄ $?9蛾F,?j?@Q >=>rT?ߊ?>녤&?c>|?>>721JIu?@+YzX>J?>x,G?u@8?V?g?sQWտ-?X?[P֛>2?L= ׾թDf>9?ō=`>ּU[Ci Sn@F=V>R?7 Ͼ"?J?HU{/l?G#Ds<0(>z3 D#Zj]:>V9_?Llgd?=hC,'??u;_/?dY4=X@=FE?%޾9>1@^hH?0Me?f|F?9?kRR`?TE>?񼒿)*>0Z?V?#M>?1>Ǯ0=#>`?RL=O>P<&s6??] ־ >@J?%]鿯4@̛e?Fr>mh >%|x ;b#>}$H0?N?[>>=?2MC? c>;?>"?Дǽ~K?{uz6T?-Aֽ:>P>ҫ?ޭl&>oz?Ѿt>t2?x۾G?>m1? ?>gI?IU?ꐿ @ЀT>ȑȾU?ܽ,*?*`??\a>{?f@2AĿ@?g?& ʿ?`3?"?I?JTDd>7?q׾gB؍#0:ՠ?{IE}Fe(>2A;nj_PMԿ徛+?/?p?oW1?? ? >+8z?u=6tBfumlN?nݿ?&Ԝ?iH kB?ia?k9?w@-+>ʕW&M>$O?O>?7=#FU ?~| 6>w>>5{(>o??Fq?=lȖh@cLu?߿`>P徂?>=!??: c?ǒ?l%wv@T?`r?b'`G>A>u=eM?ˇ7?PG>r_?eD?LʅC>pd:?\Kk?޽B`4 }>a>=?K7?I> @ym'x?A>Dxj?T a5m?[{ ?+H?>3f2>>>\6?u&?99羨?%>@a6?'?ks?(4>q?Z={< >>P?KEkT>F@0ts?2"??>#ľ;Z^VD%?Jf?h\-?]?/=*c?-`? >/]?CM> /? (a]?WWj@h?C}濁X4=?>?HƿoB?柾R,7I oFL,?,>?+޺sj(> m?|:gc_SXhN><>brG?%z>a=yW?S [B??1P#ʏ>Aq>SL=,#=i vx??K?-?q3Ž=*"~w#CCyrPI?bx?? f==L=?ݎI稿 ?>rK>3b>->v苿!?j?>>2?x)>;8ە?z=kA>jQLKB0}?١fc9>&}d3Rqr?C>ķ>'F>0?^A?0Y>ts0IP_>~[ꚿ俸A?2?{@?샾TdVȿ7?y?M?:>ͤJ?)=4?8ocfO%Կ1?ھ>GQ??]/&0? e?N=J?[Rÿ$``=%5 <}>-Ϯ?H W?9?P>I0޽l4?')?:>>M> s_?u>r}Y?8ŭ{?:( ~>[gF?bG?F@g?vIʿǪ?s%W&0Z>(4lN>?b'=¶{ f>=:?Y?f`?84A,!`rOžY\?%IK>5e =jS>nJ>YG/?ͪ>>R?G/>Y6B͢>k<?vH?V!};?Y?g,H!dz~XFW?a=@jE$@0Bʄ3>z(e@?ĿEV?Pd%GĴHN?=?L:>ח>,5V?Zd??gQ'=;r?A? %> >>#Xҋ?۾=?h)K?G>bJ<X @JT0xZ<׺>v&>{? ??+Ѿ!! *;?O?+>^M=3o?$4i?h>t2hI?=o׭>Ml cvD?vҏ=V?j@!?cW?K2>lɾVi=s*Zۿ+tj^BH>UFĿ%>쿧v?+n=a;7?ֿ1r[?|/> =?ỏ>>>2>b(?=?{-jۤd>›_/@'ѽME?\O).>Ӂg?ͿD?x#TE9?_Q?zh>3t>~=tշ>#o??Sc\K @x }`?V??+?UN?>Bb=,= ?>{?/3?Zr?πzc:F>n]?T߿oTqþ>@^?5;?/>01?C:?xB?VsuP<HXy>ۅ?[x({V@6.< s?TU>>>O? D?пk? U?fe>ڽ ?;;?|)?9? r濤ݷ?{!?Y!?6e$?w>M>a?d?+8>=o>a??o,>9׿7bX1&?) !Dz>p?@ѾuD?4>i{qpmVP?ʬC!H?<+>$/SѿoFRG˾U'>?ly?^8H?ܽ.?m^?#0Q?DY>O u?G?==)V?~@տ7>8ѿ>J5B?g?p?=Ⱦ忴L?D󾒔??n1 qQ><>hwJ#?v:O?>TG @?j>=ȾڽjE!V=1@?X!ȏs}xu6?&?N*Ҍ|?iॾZ艿˖žmi֒?}>>{?p>G0 LԾz ? DF}SP~p?r?H>gwC?> >a4S*?@)Ẳ?肋?=oϿ\c~>d$w?PH显?KEb?f&0HQΛ&>Y? ӿW9>y HU??=y8n>KZϽ>Ey?% ?l>.B?<=:@ѝi<݇_>N>=>e?-<;PnR@,>P"^>͈W3@Yٽ>+??D@2_??Z?$>uPnyOO2ܾ=WMKro?>w쿴Fv|g?R =V껾8D6w??r?b @ʟ?3-\cl> s^7?Ve ?}0>H!j"{پ%.v>@i1Md?s,?SKgG<=#ұ/@D+cξĿT6[";#$@b?>? &8\c?=%Y?KZ?cG?F??4 ۾Z]?X?c=V?>J?(@ar?f%_N? ?بN=r ?Q Tr?|ٹ?j?=t!#?}k?u4?T=00=h>eX ?0=rs ?>#@'J?ޯeƼ?ſM?b$??:Р?u?V1ۺ;VÇ>OA:{>^?r>:A&a> ?C)(?~>v?QkI?v?zKuQnDccH:= ʿߓjc?T7 >y- ( <i>= ??G.XW?M-?̳˾Գgz=ұ??[>I'V>.?.þ뾋վ\]N?iY?ױԾ$p4`?.7?YvEݾ?986]?">*9ƾ/R4==h¿c(t??K>:~R?nH'KVn?澦f=>??>I >Կa>>v^.A?,8?i=ͼ@qnbh>}=??ξ[ v?GdGj>q!R? ?$8L"?߾d>3&qً{5 T??zxJw<'@+ɾV? pz֛ eþ?=>9bM=]>Ӎ$9?My11%=(G>!7>d?i?O?ʿ O; ?tA1? ?>> ??G[<Sxڽ ">)n4 @~?K?>7==>?>vÿ˘?+{???B8?{a?`ٸobv?-!]? Oyվ{0龄!??$> ?ͤ>Q?(j5)x`>ccߛ?\ٚw?<ɿſb[+ʤC\~0==.C ?KiX"?]1w\!>F'?W7?t(?1틿0?@]W?;rMݼQ7?M0@!}ſ9?>kt?Y&T9?yԒ>^>tB?ٌ)`ξPth?GH56?|Ϳ?IEdx?3h>)eW~?_dۿ0pLx?;?;$?Rp@#/)??I?>?Sz'~?Z=;|? 
%x>ƯĶ?G?ۻD?O?ik?JAҿM k41=u?=2ʿ(󔜿_?<:>?>>,yX<M{ G>ֿ'_ǁ?[?; [%q>dG?'>x?T>>4)}??!_@ fNP>9?r?q;ʿn2?/?t¿d?#S?2>i3?`btT]ˇ^ݽvBRaRw<2=<>9> CI><هQ?C?,>}-{?fɨ]ZȳֿҿX>rf^'Wݿt>%?`c>}?ݽ>:Y >?H&MB?OmY?c{o?X?q̌Ծf=2e??LAVFʱ=dz ?KpE,T;*6>@Q΁ ؝>|Ӊ>Cٿɻ?\_?,?gx?ۧFuѩGib>rCe ?E@k&_:w~lw?$?۾K?dݿ׽Y>gNt?SxM?^=1֚ۚ5%B?t?޺? >QrV?)<?͜>"|%>@Ӿ=>F?Y0"P$?5D$վ> >̃80cU \=t| D]׽8(>zWW"=&?E>>id=~=zM>?T?i_ݿ?Y=??pɘ#? x+>j?'Wqk?~g X~&>㿕>˾ɝ`l>{un=?k5>#￰4]?j~u=jؿ4\Α?Fv>U bl?{>v=3(SU]ykƣ<">Ѥk?ݧc?bxֿ=MNJ?c>T;r_??؈=?;> T?f~(up?>Uf ?Pl?X>t?*t?8ۺY'\?'=¥c>줾n ̖>緲;F->@W> >\g?9}; >`? =$??S?~!9@LQ[<`?_ k9)??T󽟦3X\_ 1E?h:>XB>S?b3ȿ%W׾ܾV7 >==վDN?\j c58hgB*l ?@[iOȟ?75J>ʿ?9?ˡj>wx?.޴?> ͹$k>=D_?ش&SM>?>=Yʿ!qʾ;=5U>>?GǾHU7?~?>\>?={?Ot>N)?P?̛î??0 !??>> ? ߄.N[>~2>G־b^]>8D!ev>{-w2l?4?-e G2Sԗx˽؛?P@?;px*59?Jtǜ=H?ΚyI+sܾDoYg?}=F?L?`?$?i&ɾ9?o=8|?ཟ?ҁJ?o N?8‿%ۊ @?kn?KI?†rHi=>%,?E@i?5ھJ0M7;F&s{?^?>p? =rƿƿ?jr̮?J_['No:cQC- bǃ{~>+N?_dl7@ @ÿjX~?e>]%?SU?Ȕ?^NQTv较lJ h?a~?X? ?el@b?K?*>#6?%͹j= ="ɺڿ A?g#?FXҿW?Kr'Y>t?#|>@T?տ>yԋ?{# # ?&e?M??c?xɾwpN>4?QJtd_E3̾E%?$(2?Tn۽)1;S Kʐ?{ ?b3Կl=h0O: c6ʶf?zп~gc'.uL??=׿.)H>pHԇ¸?Jq_{lS7l>B?ZO B?VcGK?e|ɿ??u>(?PK)1&?E 6qEF@Nj>>rٕ?DO>!?3ĿO/>=՜?MQ?~QS>WK?ቿ_L%lF`">6>z>GZ>'nyQ<"?n,Έ?=MЩ?g፾;N?? > < M?WV?*f?9?q.>L)+GC̴?з9>GcZ[Ὧz?V%;?]>s?I?κ?>'d?t&QT7 ?|FzS?o->c>qÿ󑓾8`@UQUѾ2b9z;?H+ῬC7?3x*J$$~y-1pK?tr?=#j(@?d?nVPt<H?ڟi>9?>ҥb>FFg?ـ2E?C??֛?J?Y5>~*o{|?8?:p=?g?Sb/e??ϐξU?s5?3?N??AQL?6¿OM ?icH>5̿ct?WdaA?s6?y:OwοiE\u>, @OQk"?>>Y#>J='$?}>k>#ZU@ ?a1?ֿH>?ٽ9 >rJ>w=LI?&>g`?*'?)<_>ڪ>承$Ɗ?3=k~}?L?X?*?D$(? $?ƕ><>&G٢=SLxb>U?OxRR%G]>"&Ͼ:ۉ=I?z=΅>XZ>26>LT>ã?͏%I^> J̾՜Oĭ1>QH?󽧁0Y?D ?M1wd{=]>vI?Hmv#?eD˿?<׽YȢV& x?9g>n?T&? ??v`;v?pu?bVY?b ? @"j?">[ſW3ER<8ԾN!I~?Xk?k~7z?:.C3?ͪj㸿6?3,?B?!PؿL WfY6>&?@CL?N>V?Zd(Ta?P(?C>>?v*>5>b>AٓZ?(%>Ԗ2L>T?f?Xܾ=c>b=*2HNܞ>R?i>zӌ?v\.1bտ?m?>p-?&/=%>a>*m)?(HvoD fK?D.beYz?!*<tMuR?=8Il/sɾ(?z?l 6uC?}Y?HN ?b>-qHi"*&[?}?׭L?"@t?p>i??!?J?:I1@]?i_>?Ig?`Q?ѐP?ξ90ܾ@tQ24 7OC>#;+/%~!ݿ\±>OT?ܧ? |ý{T?S=KԿ'(Vf?Ͼ,;1?9?@ܳz???!75!+1?l?`q?6L>>w>V}? ?!N܅E9?hO?oטF?R=+d;'վR>,??n?/$6??~>8x?6)?ϥ>/Y)?l?t~<]b?n>l@J>}uxIZ=|?ؿ |Dns0 Ȋk> $?r%WZ>D޽L>ėɾ`J?^>,?yeb># ?F>˿ ?uw- ſ*>A`z>T9?6pQ;3˿0?^!>+O?!ه? |$rK*6U?a>>P`@%d? >*?2 >=>Te?Қ} ?̾* Z??X}>z>/U?Ϩ>[ۀ>FFƇ2! ?>jwݾ;]H91/@`݉}?rQ8}># H ^\@?<'@1?L.?mlԿAӀ? ??˸˹?:?Ϳʿo[]珿f'?P>@cAv? b>Vn޿i? 2?vo@?r >g?ߨP \2?$F'?S">b7e?ӆdR?&Ľ2&B>=EϹT?5Tq7kM?=^/?Ld?-:_ eO>Yץ?C8罴He_.ץӽ?6c?MDqW2 ?Z+?Q>Cx>pZ?C]{;Hj͒?9#>Af>3f3?0?lJtݩ>=pFdxC?I?= @gETҾ'?uMpa|(>"?1|?`?>ّhpW=&5ɿᣓU@>?>-?>yۿ}lCl?m0)>xH݆g?)S>i?soV`a> 5>w>h?ՔqvOswQ咿O]ȾǓ#U5>:Pp[??)?0z?ZtA?k>[f;=f>>Z>|;y9 ?ѧ*7?3??c>C?`}?=>ۖ0}fգ)?ۧ? ?N|:><Br?wVH? ?z>z>h ,?q2u?'˿A'Gž +Oj?>^ H[֋?p?J1?X?G> o>W@f9>k?Y3?ąߦ>q&þE|?WHa??1?p$?|G ?n/=w? ?-->O>?sԅ=Qvܙ^뎾>ZQ.?q;EB>ؤX>%cʾImyK?tj?)u?ߍ?FZm?si+`?=+1֋=?0 ?Y?濐R?t R> >x?vp\,?ξG?ri@>C>ip?=KRvj=9Ͷ }>y>NNF?k(@tE- ?>ܛ6a#?bBx`x+H=>ݽFN>cھmp ?9ۨ=[7nҾ>RJ2?>)־Ͽ$P ս/ͽ`Ɋ>/~΅?>ۑ?/6?]0=NᴿM+5o_>e?!?ҿDCD>?L_,?Ѽ?9$ Z?ey|ׯ? >ىV=d+K?xv>yN@f) ?4N<& g>?.??4`?ϰ%TWe>wXwo?g>*??~V`Ҿ?S?9`?@= >Mv'ɽ?8>?[U~f= %?釲?vo>Xؾa>?> ҃?l>r**Zj>)?:>$<)?iC??€e=9U?b~??_> վ?܀=(뾈kǔR8c_>z qTj f,߀z=n ?Z;3m^䢽Qa=Ƶ^-?s!f? h󿾖ľV?]4D;.ĿW꾆"Qxk{=@@e /z= EsҾRyxCõԾï?"nܜg(ϰɹmΜoJ"?֍6?:B?xvB"5S$.?Q??=3 !:?t2?"m? Z$u'_6=DhP?WhT b?؏4>xE<Ϳ7?p߾4`w>œ?sN/@ǿ?hbD>>?5?7-?ɻ?K百w?>Y 0@Qſ>x 㬢& ?_[ `̾a3U?>A=$wz=š@*=Zj-AKe>ʾZN>>x ?gK>_r˿3!>T?:2Ng?n}xa>ver?>91?uJ?,>t>>-ܾWd?'fY)^@/>=,? q>\8d>˃>n((N=ۿD_q>?$>Ba#->sܾ^h=j*?!>7˾䳿VYܹE>8Ǒ?w̾Y PNԿ B@lS<[Of?jgoa쀽,xn>=>.yJ#}4?IԾ䢘Ht>^3ڿC>$>:侜(?@4 .0ɜ1 Y\{? D>EߌIU>NyE?|?I?l?7 @g=?w۰6>c;;?@N? >sY?> ?򁿥e>x|kܿE?>4W@ ?J=->r?Y/h>+j ?? E?? e;\T>s<T4h Om: ??z ?Oۼ?;?Hp>`ܾ?d"Y?5>l?P9 B?X#>޴ ?YuʬF?>>I!)?>J=q׀}e=>i??+?? gݦ?1c0"=:ɾnľ6e<ֿB?`B?'ppU-=:? ?w>sZXI>|=W;1=h[N&?]k>?E>G\?0K?89N?(#?ȿ+ "۹:>f]i)?nfL?J9?? 
޿5=\?y^W?i93>KZžҿ(@P?#X>4]6 ?h7͚??ʼnnL~><=戾lf?$m<~A?Œ?t?{ #ο0>?[Eo>?^bݽC= F ׽ :"|ye'c8??3޾ #sZX?Y>I0+~]ÿ8 鱾Oq?",w.t獾Ҿv?(>= @ @Cd>.̴>:= >?ھQ>r=^ȏ?X9?>p(?n>+ S68rSdg_:ؕ;?ׇB?̿g86Ic?p@@%^>g>k>HѿƤ?> @}2v-?jv>+dJ<{k?gܽy?f7W?'?FFb?io?R"ג;9 ??ˊ?%Q+?/? .?4@t0 V;+?rhѽo @;?p޿i넾S>6>QTo?N9Wu7=?xֱ>`?ő=&k!>HP?I!n[5/{n>C&v,>X#)?+6;?ˋCO?a??c @;&?{ƾ m>iC>KS ?>cSR?G4|b? O@>TGjп ?,?lw>ˊ;?Q͐d*ҿ-?ކ>;Kw{?LEݽH?ј,UI=@6Kr,?˻?}%H䏾P4[>{GJ>̱0a7J xM0?޷>?̀=Y7f>K= zb8>з??7m}?Sʼ97龂 ?!ܣӭP<@t%"L =3z?s!?eK?b>?~?]o3?url{FXS?O[>Z;?d*?_=G> ]?5!z>P?)D@@?[¾hR@I>?)F>̀?)9ED¿e>T1=P0?b<_>٬@G>[d ]?[|.`9q>5?a ?H? ? ?R?㾑=a,* >m ? ?Rhn
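The tensor payload above is only consumed when the pickle is deserialized back into a dnnlib Network. The following is a minimal, illustrative sketch (not part of the original file) of how such a network pickle might be loaded and sampled; the file name 'network.pkl' is a placeholder, and the assumption that the pickle holds a single generator Network rather than the (G, D, Gs) tuple used by the official StyleGAN2 training snapshots is inferred from the header, not guaranteed.

# Minimal loading sketch, assuming dnnlib and its TensorFlow 1.x backend are importable.
import pickle
import numpy as np
import dnnlib
import dnnlib.tflib as tflib

tflib.init_tf()                        # create the default TF session/graph that dnnlib expects
with open('network.pkl', 'rb') as f:   # hypothetical path to a pickle like this one
    G = pickle.load(f)                 # assumed: a single dnnlib.tflib.network.Network (generator)

# Draw one random latent and run the generator; label_size is 0 in the static_kwargs above,
# so the label input is an empty [minibatch, 0] array.
latents = np.random.randn(1, *G.input_shape[1:])
labels = np.zeros([1, 0], dtype=np.float32)
images = G.run(latents, labels,
               output_transform=dict(func=tflib.convert_images_to_uint8, nchw_to_nhwc=True))
print(images.shape)                    # e.g. [1, resolution_h, resolution_w, num_channels]

Note that unpickling re-executes the embedded build_module_src shown earlier, so any custom ops referenced there (such as the spectral-norm wrapper) must be defined within that source for loading to succeed.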