Diffstat (limited to 'yjit_codegen.c')
-rw-r--r--  yjit_codegen.c  18
1 file changed, 7 insertions, 11 deletions
diff --git a/yjit_codegen.c b/yjit_codegen.c
index 061cd1ead7..8c888fd53a 100644
--- a/yjit_codegen.c
+++ b/yjit_codegen.c
@@ -554,7 +554,7 @@ yjit_entry_prologue(codeblock_t *cb, const rb_iseq_t *iseq)
const uint32_t old_write_pos = cb->write_pos;
- // Align the current write positon to cache line boundaries
+ // Align the current write position to cache line boundaries
cb_align_pos(cb, 64);
uint8_t *code_ptr = cb_get_ptr(cb, cb->write_pos);
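
For context, cb_align_pos(cb, 64) rounds the buffer's write position up to the next 64-byte cache line boundary before the entry prologue is emitted. Below is a minimal sketch of that rounding arithmetic, assuming a power-of-two alignment; the helper name align_pos_up is hypothetical and YJIT's real cb_align_pos may pad the buffer differently.

#include <stdint.h>

// Round pos up to the next multiple of alignment.
// Assumes alignment is a power of two (e.g. 64 for a cache line).
static uint32_t
align_pos_up(uint32_t pos, uint32_t alignment)
{
    return (pos + alignment - 1) & ~(alignment - 1);
}

// Example: align_pos_up(100, 64) == 128; align_pos_up(128, 64) == 128.
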
@@ -640,16 +640,6 @@ gen_single_block(blockid_t blockid, const ctx_t *start_ctx, rb_execution_context
{
RUBY_ASSERT(cb != NULL);
- // Check if there is enough executable memory.
- // FIXME: This bound isn't enforced and long blocks can potentially use more.
- enum { MAX_CODE_PER_BLOCK = 1024 };
- if (cb->write_pos + MAX_CODE_PER_BLOCK >= cb->mem_size) {
- return NULL;
- }
- if (ocb->write_pos + MAX_CODE_PER_BLOCK >= ocb->mem_size) {
- return NULL;
- }
-
// Allocate the new block
block_t *block = calloc(1, sizeof(block_t));
if (!block) {
@@ -778,6 +768,12 @@ gen_single_block(blockid_t blockid, const ctx_t *start_ctx, rb_execution_context
// doesn't go to the next instruction.
RUBY_ASSERT(!jit.record_boundary_patch_point);
+ // If code for the block doesn't fit, free the block and fail.
+ if (cb->dropped_bytes || ocb->dropped_bytes) {
+ yjit_free_block(block);
+ return NULL;
+ }
+
if (YJIT_DUMP_MODE >= 2) {
// Dump list of compiled instructions
fprintf(stderr, "Compiled the following for iseq=%p:\n", (void *)iseq);
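
The added check works because the code buffers flag overflow as it happens instead of being bounds-checked up front: once a write would run past the end of the buffer, the byte is dropped and dropped_bytes is set, so gen_single_block() can compile the whole block and then consult that flag on both cb and ocb, freeing the block and returning NULL if either buffer ran out of room. A minimal sketch of that pattern follows, using a simplified struct and a hypothetical sketch_write_byte helper rather than YJIT's actual codeblock_t writer from yjit_asm.c.

#include <stdbool.h>
#include <stdint.h>

typedef struct {
    uint8_t *mem_block;   // backing memory for generated code
    uint32_t mem_size;    // total capacity in bytes
    uint32_t write_pos;   // next byte to write
    bool dropped_bytes;   // set once a write would overflow
} sketch_codeblock_t;

static void
sketch_write_byte(sketch_codeblock_t *cb, uint8_t byte)
{
    if (cb->write_pos < cb->mem_size) {
        cb->mem_block[cb->write_pos++] = byte;
    }
    else {
        // Out of space: drop the byte and remember that the generated
        // code is incomplete so the caller can discard the block.
        cb->dropped_bytes = true;
    }
}

Checking after the fact removes the need for the old MAX_CODE_PER_BLOCK guess, whose bound was never actually enforced for long blocks, as the removed FIXME noted.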