Init Test GAN #5743

Closed
wants to merge 35 commits
acd7ea9
Init Test GAN
reyoung Nov 17, 2017
19a2c08
Make GAN trainable
reyoung Nov 21, 2017
36d6c7a
Merge branch 'develop' of github.com:baidu/Paddle into feature/simple…
reyoung Nov 21, 2017
8cff9a9
Simple Update
reyoung Nov 21, 2017
eee50e6
Merge branch 'develop' of github.com:baidu/Paddle into feature/simple…
reyoung Nov 21, 2017
505f164
Several enhancement
reyoung Nov 21, 2017
529bc6a
Stash
reyoung Nov 22, 2017
a712cd5
Unify fluid submodules to fluid module
reyoung Nov 27, 2017
012f8cd
Remove g_main_program/g_startup_program
reyoung Nov 27, 2017
8f03f2e
Merge branch 'develop' of github.com:baidu/Paddle into feature/remove…
reyoung Nov 27, 2017
0819ef2
Typo
reyoung Nov 27, 2017
758517c
Add API for switch default program
reyoung Nov 27, 2017
bbcba67
Fix CI
reyoung Nov 27, 2017
952f138
Merge branch 'develop' into feature/remove_g_program
reyoung Nov 27, 2017
f2e9ed3
Merge branch 'develop' into feature/remove_g_program
reyoung Nov 27, 2017
0fc3809
Merge branch 'develop' of github.com:baidu/Paddle into feature/simple…
reyoung Nov 27, 2017
4a67639
Merge branch 'feature/remove_g_program' into feature/simple_gan
reyoung Nov 27, 2017
013eb62
Merge branch 'feature/switch_program' into feature/simple_gan
reyoung Nov 27, 2017
92436f1
Add Python wrap of conv2d_transpose and its unittest
reyoung Nov 27, 2017
90051be
Follow comments
reyoung Nov 28, 2017
9775d3a
Merge branch 'develop' of github.com:baidu/Paddle into feature/deconv…
reyoung Nov 28, 2017
407ec4e
Fix format
reyoung Nov 28, 2017
979eeda
Merge branch 'develop' into feature/switch_program
reyoung Nov 28, 2017
94e2ed3
Merge branch 'feature/deconv_layer' into feature/simple_gan
reyoung Nov 28, 2017
0fc270b
Merge branch 'feature/switch_program' into feature/simple_gan
reyoung Nov 28, 2017
e94ef9e
Fix MacOS compile
reyoung Nov 28, 2017
75248e8
Update GRPC
reyoung Nov 28, 2017
6c19dee
Merge branch 'develop' of github.com:baidu/Paddle into feature/fix_ma…
reyoung Nov 28, 2017
91bfb07
Unset PROTOBUF_EXEC
reyoung Nov 28, 2017
f096e71
Make param_attr as a strong typed class
reyoung Nov 28, 2017
df6a76c
Merge branch 'develop' of github.com:baidu/Paddle into feature/param_…
reyoung Nov 28, 2017
08d15cb
Merge branch 'feature/param_attr' into feature/simple_gan
reyoung Nov 28, 2017
149677c
Add test_dcgan
reyoung Nov 28, 2017
28f6e47
Refine code
reyoung Nov 28, 2017
018a987
Update
reyoung Nov 28, 2017
2 changes: 1 addition & 1 deletion paddle/operators/sigmoid_cross_entropy_with_logits_op.cc
@@ -99,7 +99,7 @@ class SigmoidCrossEntropyWithLogitsOpMaker
"log(p/(1-p)).");
AddInput("Labels",
"(Tensor, default Tensor<float>), a 2-D tensor of the same type "
"and shape as X. This input is a tensor of probabalistic labels "
"and shape as X. This input is a tensor of probabilistic labels "
"for each logit");
AddOutput("Out",
"(Tensor, default Tensor<float>), a 2-D tensor with shape N x D "
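For context, the op whose comment is fixed above computes element-wise sigmoid cross entropy between the logits X and the probabilistic Labels. A minimal NumPy sketch of that mathematical definition (not the op's actual CPU/CUDA kernel) is:

import numpy as np

def sigmoid_cross_entropy_with_logits(x, labels):
    # x: N x D logits, labels: N x D probabilistic labels in [0, 1]
    # loss = -labels*log(sigmoid(x)) - (1 - labels)*log(1 - sigmoid(x)),
    # written in the numerically stable form max(x, 0) - x*labels + log(1 + exp(-|x|))
    return np.maximum(x, 0) - x * labels + np.log1p(np.exp(-np.abs(x)))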
3 changes: 2 additions & 1 deletion python/paddle/v2/fluid/__init__.py
@@ -13,13 +13,14 @@
import optimizer
import backward
import regularizer
from param_attr import ParamAttr

from core import LoDTensor, CPUPlace, GPUPlace

Tensor = LoDTensor
__all__ = framework.__all__ + executor.__all__ + [
'io', 'initializer', 'layers', 'nets', 'optimizer', 'backward',
'regularizer', 'LoDTensor', 'CPUPlace', 'GPUPlace', 'Tensor'
'regularizer', 'LoDTensor', 'CPUPlace', 'GPUPlace', 'Tensor', 'ParamAttr'
]


78 changes: 77 additions & 1 deletion python/paddle/v2/fluid/framework.py
@@ -3,10 +3,11 @@
import numpy as np
from . import core
import proto.framework_pb2 as framework_pb2
import contextlib

__all__ = [
'Block', 'Variable', 'Program', 'Operator', 'default_startup_program',
'default_main_program'
'default_main_program', 'program_guard'
]


@@ -659,8 +660,83 @@ def __init__(self, block, shape, dtype, **kwargs):


def default_startup_program():
"""
Get the default startup program. In the startup program, Paddle initializes
parameters, NCCL handles, etc.

Returns:
Program: startup program
"""
return _startup_program_


def default_main_program():
"""
Get the default main program. The main program is used for training or testing.

Returns:
Program: main program
"""
return _main_program_


def switch_main_program(program):
"""
Switch the main program to a new program.

Args:
program(Program): The new main program

Returns:
Program: The previous main program
"""
global _main_program_
prev_program = _main_program_
_main_program_ = program
return prev_program


def switch_startup_program(program):
"""
Switch the startup program to a new program.

Args:
program(Program): The new startup program

Returns:
Program: The previous startup program
"""
global _startup_program_
prev_program = _startup_program_
_startup_program_ = program
return prev_program


@contextlib.contextmanager
def program_guard(main_program, startup_program=None):
"""
Switch the main/startup program within a `with` statement

Examples:
>>> with program_guard(Program()):
>>> data = fluid.layers.data(...)
>>> hidden = fluid.layers.fc(...)

Args:
main_program(Program): New main program inside `with` statement
startup_program(Program): New startup program inside `with` statement.
None means do not change startup program.

Returns:
None
"""
if not isinstance(main_program, Program):
raise TypeError("main_program should be Program")
main_program = switch_main_program(main_program)
if startup_program is not None:
if not isinstance(startup_program, Program):
raise TypeError("startup_program should be Program")
startup_program = switch_startup_program(startup_program)
yield
switch_main_program(main_program)
if startup_program is not None:
switch_startup_program(startup_program)
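Taken together, program_guard composes switch_main_program and switch_startup_program so that everything built inside the `with` block is recorded into the given programs and the previous defaults are restored on exit. A minimal usage sketch (the data/fc layer calls and their argument names are illustrative assumptions, not part of this diff):

import paddle.v2.fluid as fluid
from paddle.v2.fluid.framework import Program, program_guard

main = Program()
startup = Program()
with program_guard(main, startup):
    # layers and parameters created here go into `main`/`startup`
    # instead of the global default programs
    img = fluid.layers.data(name='img', shape=[784], dtype='float32')
    hidden = fluid.layers.fc(input=img, size=200, act='relu')
# on exit, the previous default main/startup programs are restored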
71 changes: 28 additions & 43 deletions python/paddle/v2/fluid/layer_helper.py
@@ -1,8 +1,10 @@
import copy
import itertools

from framework import Variable, default_main_program, default_startup_program, unique_name, dtype_is_floating
from framework import Variable, default_main_program, default_startup_program, \
unique_name, dtype_is_floating
from paddle.v2.fluid.initializer import Constant, Xavier
from param_attr import ParamAttr


class LayerHelper(object):
@@ -59,31 +61,15 @@ def input(self, input_param_name='input'):

@property
def param_attr(self):
default = {'name': None}
actual = self.kwargs.get('param_attr', None)
if actual is None:
actual = default
for default_field in default.keys():
if default_field not in actual:
actual[default_field] = default[default_field]
return actual
return ParamAttr.to_attr(self.kwargs.get('param_attr', None))

@property
def bias_attr(self):
default = {'name': None}
bias_attr = self.kwargs.get('bias_attr', None)
if bias_attr is None:
bias_attr = default

if isinstance(bias_attr, dict):
for default_field in default.keys():
if default_field not in bias_attr:
bias_attr[default_field] = default[default_field]
return bias_attr
return ParamAttr.to_attr(self.kwargs.get('bias_attr', None))

def multiple_param_attr(self, length):
param_attr = self.param_attr
if isinstance(param_attr, dict):
if isinstance(param_attr, ParamAttr):
param_attr = [param_attr]

if len(param_attr) != 1 and len(param_attr) != length:
@@ -111,23 +97,30 @@ def input_dtype(self, input_param_name='input'):
raise ValueError("Data Type mismatch")
return dtype

def create_parameter(self, attr, shape, dtype, suffix='w',
initializer=None):
def create_parameter(self,
attr,
shape,
dtype,
is_bias=False,
default_initializer=None):
# Deepcopy the attr so that parameters can be shared in program
attr_copy = copy.deepcopy(attr)
if initializer is not None:
attr_copy['initializer'] = initializer
assert isinstance(attr, ParamAttr)
suffix = 'b' if is_bias else 'w'

if default_initializer is None:
if is_bias:
attr.set_default_bias_initializer()
else:
attr.set_default_param_initializer()
else:
attr_copy['initializer'] = self._get_default_initializer(dtype)
if attr_copy['name'] is None:
attr_copy['name'] = unique_name(".".join([self.name, suffix]))
attr.set_default_initializer(default_initializer)
if attr.name is None:
attr.name = unique_name(".".join([self.name, suffix]))

self.startup_program.global_block().create_parameter(
dtype=dtype, shape=shape, **attr_copy)
dtype=dtype, shape=shape, **attr.to_kwargs(with_initializer=True))
return self.main_program.global_block().create_parameter(
name=attr_copy['name'],
dtype=dtype,
shape=shape,
trainable=attr_copy.get('trainable', True))
dtype=dtype, shape=shape, **attr.to_kwargs())

def create_tmp_variable(self, dtype):
return self.main_program.current_block().create_var(
@@ -152,11 +145,7 @@ def set_variable_initializer(self, var, initializer):
persistable=True,
initializer=initializer)

def append_bias_op(self,
input_var,
bias_initializer,
dim_start=1,
dim_end=None):
def append_bias_op(self, input_var, dim_start=1, dim_end=None):
"""
Append bias operator and return its output. If the user does not set
bias_attr, append_bias_op will return input_var
@@ -176,11 +165,7 @@ def append_bias_op(self,
return input_var

b = self.create_parameter(
attr=bias_attr,
shape=size,
dtype=input_var.dtype,
suffix='b',
initializer=bias_initializer)
attr=bias_attr, shape=size, dtype=input_var.dtype, is_bias=True)
tmp = self.create_tmp_variable(dtype=input_var.dtype)
self.append_op(
type='elementwise_add',
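With this refactor, param_attr/bias_attr are normalized into ParamAttr objects via ParamAttr.to_attr and unpacked by create_parameter through to_kwargs. A rough sketch of what a strongly typed attribute looks like at a layer call site (the exact ParamAttr constructor arguments are assumptions inferred from attr.name and the set_default_*_initializer hooks above, not confirmed by this diff):

import paddle.v2.fluid as fluid
from paddle.v2.fluid.initializer import Constant, Xavier

# hypothetical layer call: ParamAttr is assumed to accept `name` and
# `initializer` keywords, matching the attributes create_parameter reads
x = fluid.layers.data(name='x', shape=[32], dtype='float32')
y = fluid.layers.fc(
    input=x,
    size=10,
    param_attr=fluid.ParamAttr(name='fc.w', initializer=Xavier()),
    bias_attr=fluid.ParamAttr(name='fc.b', initializer=Constant(0.0)))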