Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Bugfix macroprop arithmetic operator overloads shouldn't change the macroprop #738

Merged
merged 1 commit into from
Nov 24, 2021
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 4 additions & 12 deletions include/flamegpu/runtime/utility/DeviceMacroProperty.cuh
Original file line number Diff line number Diff line change
Expand Up @@ -102,14 +102,12 @@ class DeviceMacroProperty : public ReadOnlyDeviceMacroProperty<T, I, J, K, W> {
* atomic add
* @param val The 2nd operand
* @return (this + val)
* @note Only suitable where T is type int32_t, uint32_t, uint64_t, float, double
*/
__device__ __forceinline__ T operator+(const T& val) const;
/**
* atomic subtraction
* @param val The 2nd operand
* @return (this - val)
* @note Only suitable where T is type int32_t or uint32_t
*/
__device__ __forceinline__ T operator-(const T& val) const;
/**
Expand Down Expand Up @@ -293,35 +291,29 @@ __device__ __forceinline__ DeviceMacroProperty<T, I, J, K, W>& DeviceMacroProper
}
template<typename T, unsigned int I, unsigned int J, unsigned int K, unsigned int W>
/**
 * Read-only addition: returns (*this + val) WITHOUT modifying the macro property.
 * @param val The 2nd operand
 * @return The current property value plus val
 * @note This is a plain read (no atomicAdd); the stored value is left unchanged,
 *       so only the read flag is set for seatbelts device-write checking.
 */
__device__ __forceinline__ T DeviceMacroProperty<T, I, J, K, W>::operator+(const T& val) const {
#if !defined(SEATBELTS) || SEATBELTS
    // Seatbelts: this overload is only valid on a scalar (1x1x1x1) property.
    if (I != 1 || J != 1 || K != 1 || W != 1) {
        DTHROW("Indexing error, property has more dimensions.\n");
        return { };
    } else if (this->ptr == nullptr) {
        return { };
    }
    // Read-only access: mark read (not write) for later device-write validation.
    this->setCheckReadFlag();
#endif
    return *this->ptr + val;
}
template<typename T, unsigned int I, unsigned int J, unsigned int K, unsigned int W>
/**
 * Read-only subtraction: returns (*this - val) WITHOUT modifying the macro property.
 * @param val The 2nd operand
 * @return The current property value minus val
 * @note This is a plain read (no atomicSub); the stored value is left unchanged,
 *       so only the read flag is set for seatbelts device-write checking.
 */
__device__ __forceinline__ T DeviceMacroProperty<T, I, J, K, W>::operator-(const T& val) const {
#if !defined(SEATBELTS) || SEATBELTS
    // Seatbelts: this overload is only valid on a scalar (1x1x1x1) property.
    if (I != 1 || J != 1 || K != 1 || W != 1) {
        DTHROW("Indexing error, property has more dimensions.\n");
        return { };
    } else if (this->ptr == nullptr) {
        return { };
    }
    // Read-only access: mark read (not write) for later device-write validation.
    this->setCheckReadFlag();
#endif
    return *this->ptr - val;
}
template<typename T, unsigned int I, unsigned int J, unsigned int K, unsigned int W>
__device__ __forceinline__ T DeviceMacroProperty<T, I, J, K, W>::operator++() {
Expand Down
14 changes: 14 additions & 0 deletions tests/test_cases/runtime/test_device_macro_property.cu
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,10 @@ FLAMEGPU_AGENT_FUNCTION(Write_add2, flamegpu::MessageNone, flamegpu::MessageNone
FLAMEGPU->setVariable<unsigned int>("b", FLAMEGPU->environment.getMacroProperty<unsigned int>("int") + FLAMEGPU->getVariable<unsigned int>("a"));
return flamegpu::ALIVE;
}
// Reads the macro property "int" and stores its current value in agent variable "c",
// allowing the host-side test to verify the property itself was not modified by the
// preceding arithmetic-operator agent function.
FLAMEGPU_AGENT_FUNCTION(Write_2check, flamegpu::MessageNone, flamegpu::MessageNone) {
    FLAMEGPU->setVariable<unsigned int>("c", FLAMEGPU->environment.getMacroProperty<unsigned int>("int"));
    return flamegpu::ALIVE;
}
TEST(DeviceMacroPropertyTest, add2) {
ModelDescription model("device_env_test");
// Setup environment
Expand All @@ -134,10 +138,13 @@ TEST(DeviceMacroPropertyTest, add2) {
AgentDescription& agent = model.newAgent("agent");
agent.newVariable<unsigned int>("a");
agent.newVariable<unsigned int>("b");
agent.newVariable<unsigned int>("c");
AgentFunctionDescription& initFn = agent.newFunction("init", Init_add);
AgentFunctionDescription& writeFn = agent.newFunction("write", Write_add2);
AgentFunctionDescription& checkFn = agent.newFunction("check", Write_2check);
model.newLayer().addAgentFunction(initFn);
model.newLayer().addAgentFunction(writeFn);
model.newLayer().addAgentFunction(checkFn);
AgentVector population(agent, 1);
population[0].setVariable<unsigned int>("a", 12u);
// Do Sim
Expand All @@ -148,6 +155,8 @@ TEST(DeviceMacroPropertyTest, add2) {
ASSERT_NO_THROW(cudaSimulation.getPopulationData(population));
const unsigned int t_out = population.at(0).getVariable<unsigned int>("b");
ASSERT_EQ(13u, t_out);
const unsigned int t_out2 = population.at(0).getVariable<unsigned int>("c");
ASSERT_EQ(1u, t_out2);
}
FLAMEGPU_AGENT_FUNCTION(Init_sub, flamegpu::MessageNone, flamegpu::MessageNone) {
FLAMEGPU->environment.getMacroProperty<unsigned int>("int").exchange(25);
Expand Down Expand Up @@ -198,10 +207,13 @@ TEST(DeviceMacroPropertyTest, sub2) {
AgentDescription& agent = model.newAgent("agent");
agent.newVariable<unsigned int>("a");
agent.newVariable<unsigned int>("b");
agent.newVariable<unsigned int>("c");
AgentFunctionDescription& initFn = agent.newFunction("init", Init_sub);
AgentFunctionDescription& writeFn = agent.newFunction("write", Write_sub2);
AgentFunctionDescription& checkFn = agent.newFunction("check", Write_2check);
model.newLayer().addAgentFunction(initFn);
model.newLayer().addAgentFunction(writeFn);
model.newLayer().addAgentFunction(checkFn);
AgentVector population(agent, 1);
population[0].setVariable<unsigned int>("a", 12u);
// Do Sim
Expand All @@ -212,6 +224,8 @@ TEST(DeviceMacroPropertyTest, sub2) {
ASSERT_NO_THROW(cudaSimulation.getPopulationData(population));
const unsigned int t_out = population.at(0).getVariable<unsigned int>("b");
ASSERT_EQ(13u, t_out);
const unsigned int t_out2 = population.at(0).getVariable<unsigned int>("c");
ASSERT_EQ(25u, t_out2);
}

FLAMEGPU_AGENT_FUNCTION(Write_postincrement, MessageNone, MessageNone) {
Expand Down